WIP

commit d1cebaca1a
parent cf23a8ba97
@@ -206,7 +206,12 @@ export class ProcessorService {
             }
         }

-        // Commit and push changes if any workitems were updated
+        if (USE_LOCAL_REPO) {
+            console.log('Skipping commit and push changes to main repository: Using local repository');
+            return;
+        }
+
+        // Commit and push changes if any qworkitems were updated
         if (updatedAnyWorkitem) {
             console.log('Committing changes to workitem files...');
             await this.sharedRepositoryService.commitChanges(
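Note (illustration, not part of this commit): USE_LOCAL_REPO is assumed here to be a boolean flag derived from the environment, for example:

    // Hypothetical definition; the real flag may live in a shared config module.
    const USE_LOCAL_REPO: boolean = process.env.USE_LOCAL_REPO === 'true';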
@@ -199,13 +199,14 @@ export class ProjectWorkitemsService {
             guidelinePaths
                 .map(g => g.trim())
                 .forEach(fileName => {
+                    console.debug("Collected guideline file: " + fileName);
                     const filePath = path.join(projectRepoPath, fileName);
                     if (fs.existsSync(filePath)) {
                         relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
                     }
                 });

-            console.log(`ProjectWorkitemsService: Collected ${Object.keys(relevantFiles).length} relevant files for workitem ${workitem.name}`);
+            console.log(`ProjectWorkitemsService: Collected ${Object.keys(relevantFiles).length} guideline files for workitem ${workitem.name}`);
         } catch (error) {
             console.error(`Error collecting relevant files for workitem ${workitem.name}:`, error);
         }
@@ -270,10 +271,16 @@ export class ProjectWorkitemsService {
                 DRY_RUN_SKIP_GEMINI
             );

+            const workItemPrompt = `\n`
+                + `---\n`
+                + `Here is the work item prompt: ${workitemName}\n`
+                + `${workitemContent}\n`
+                + `---\n`;
+
             // Process the model stream
             const result = await geminiFileSystemService.processModelStream(
                 guidelines,
-                workitemContent,
+                workItemPrompt,
                 projectRepoPath
             );
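Note (illustration, not part of this commit): with this change processModelStream receives a delimited prompt instead of the raw workitem content. For sample inputs the template above evaluates roughly as follows:

    // Hypothetical inputs; in the service, workitemName and workitemContent come from the workitem file.
    const workitemName = 'document-archiving';
    const workitemContent = '## Document archiving ...';
    const workItemPrompt = `\n---\nHere is the work item prompt: ${workitemName}\n${workitemContent}\n---\n`;
    // => "\n---\nHere is the work item prompt: document-archiving\n## Document archiving ...\n---\n"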
@@ -20,7 +20,7 @@ export interface FunctionArgs {
     dirPath?: string;
     searchString?: string;
     filePattern?: string;
-    decision?: 'create' | 'update' | 'delete' | 'skip';
+    outcome?: 'create' | 'update' | 'delete' | 'skip';
     reason?: string;
 }

@@ -144,7 +144,7 @@ export class GeminiFileSystemService {
             properties: {
                 searchString: {
                     type: FunctionDeclarationSchemaType.STRING,
-                    description: "String to search for in project files"
+                    description: "String to search for in project files (case sensitive)"
                 },
                 filePattern: {
                     type: FunctionDeclarationSchemaType.STRING,
@@ -169,22 +169,22 @@ export class GeminiFileSystemService {
                 }
             },
             {
-                name: "makeDecision",
-                description: "State your decision about implementing the workitem",
+                name: "reportFinalOutcome",
+                description: "Submit the final outcome for compliance with guidelines. Can only be called once.",
                 parameters: {
                     type: FunctionDeclarationSchemaType.OBJECT,
                     properties: {
-                        decision: {
+                        outcome: {
                             type: FunctionDeclarationSchemaType.STRING,
-                            description: "Your decision: 'create', 'update', 'delete', or 'skip'",
+                            description: "The final outcome: 'create', 'update', 'delete', or 'skip'",
                             enum: ["create", "update", "delete", "skip"]
                         },
                         reason: {
                             type: FunctionDeclarationSchemaType.STRING,
-                            description: "Reason for your decision"
+                            description: "Reason for this outcome. For instance, 'create' when files have been created, 'skip' when no files has been created, or 'update' when files have been updated."
                         }
                     },
-                    required: ["decision", "reason"]
+                    required: ["outcome", "reason"]
                 }
             }
         ]
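Note (illustration, not part of this commit): a call to the renamed tool, as the stream handler later in this file receives it, is assumed to look roughly like this, with a name plus an args object matching the declaration above:

    // Hypothetical payload; field names follow the declared parameters.
    const exampleCall = {
        name: 'reportFinalOutcome',
        args: {
            outcome: 'update',
            reason: 'Existing feature files were updated to comply with the guidelines',
        },
    };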
@@ -322,7 +322,7 @@ export class GeminiFileSystemService {
                 }
             } else if (entry.isFile()) {
                 // Check if the file matches the pattern
-                if (!filePattern || this.matchesPattern(entry.name, filePattern)) {
+                if (!filePattern || this.matchesPattern(relativePath, filePattern)) {
                     searchInFile(fullPath, relativePath);
                 }
             }
@@ -335,6 +335,7 @@ export class GeminiFileSystemService {
         // Start the search from the root path
         searchInDirectory(rootPath, rootPath);

+        console.debug(`Search returned ${results.length} results`)
         return results;
     }

@@ -387,8 +388,12 @@ export class GeminiFileSystemService {

         // Create the prompt
         const prompt = `
+Here is your guideline:
+
 ${guidelines}

+Additional content:
+
 ${additionalContent}

 You have access to the following function calls to help you understand the project structure and create implementations:
@@ -398,11 +403,13 @@ You have access to the following function calls to help you understand the proje
 - listFiles(dirPath): List files in a directory in the project repository
 - grepFiles(searchString, filePattern): Search for a string in project files, optionally filtered by a file pattern
 - deleteFile(filePath): Delete a file from the project repository
-- makeDecision(decision, reason): State your decision about implementing the workitem. Decision must be one of: 'create', 'update', 'delete', 'skip'

-IMPORTANT!!: First use the function calls above to actually implement the workitem. Make all necessary function calls to fully implement the workitem.
+IMPORTANT: First use the function calls above to comply with the guidelines. Create, update, or delete all required files.

-After you have implemented the workitem using function calls, use the makeDecision function to state your final decision with a reason.
+Then, once finished with all the guidelines above, use this function once to report the overall outcome:
+- reportFinalOutcome(outcome, reason): Outcome must be one of: 'create', 'update', 'delete', 'skip'
+
+You won't be able to update other files once you've made a decision.
 `;

         // Instantiate the model with our file operation tools
@@ -410,9 +417,9 @@ After you have implemented the workitem using function calls, use the makeDecisi
             model: this.model,
             tools: this.fileOperationTools,
             generation_config: {
-                temperature: 0.1, // Very low temperature for more deterministic responses
-                top_p: 0.95, // Higher top_p to allow more diverse completions when needed
-                top_k: 40, // Consider only the top 40 tokens
+                temperature: 0.3, // Very low temperature for more deterministic responses
+                top_p: 0.8, // Higher top_p to allow more diverse completions when needed
+                top_k: 60, // Consider only the top 40 tokens
             },
         });

@@ -498,14 +505,14 @@ After you have implemented the workitem using function calls, use the makeDecisi
                         // Track the file deleted
                         filesDeleted.push(functionArgs.filePath!);
                         break;
-                    case 'makeDecision':
-                        console.debug(`- received makeDecision function call: ${functionArgs.decision} - ${functionArgs.reason}`);
+                    case 'reportFinalOutcome':
+                        console.debug(`- received reportFinalOutcome function call: ${functionArgs.outcome} - ${functionArgs.reason}`);
                         // Store the decision
                         decision = {
-                            decision: functionArgs.decision!,
+                            decision: functionArgs.outcome!,
                             reason: functionArgs.reason!
                         };
-                        functionResponse = `Decision recorded: ${functionArgs.decision} - ${functionArgs.reason}`;
+                        functionResponse = `Outcome recorded: ${functionArgs.outcome} - ${functionArgs.reason}`;
                         break;
                     default:
                         throw new Error(`Unknown function: ${functionName}`);
@@ -532,6 +539,10 @@ After you have implemented the workitem using function calls, use the makeDecisi
                     modelResponses.push(nextResult.textContent);
                 }
                 if (nextResult.functionCall) {
+                    if (decision != null) {
+                        console.warn(`Received another function call for ${nextResult.functionCall.name}, but a decision hsa been recorded. Ignoring stream`);
+                        break;
+                    }
                     pendingFunctionCalls.push(nextResult.functionCall);
                 }

@@ -560,14 +571,19 @@ After you have implemented the workitem using function calls, use the makeDecisi
                         modelResponses.push(nextResult.textContent);
                     }
                     if (nextResult.functionCall) {
+                        if (decision != null) {
+                            console.warn(`Received another function call for ${nextResult.functionCall.name}, but a decision hsa been recorded. Ignoring stream`);
+                            break;
+                        }
                         pendingFunctionCalls.push(nextResult.functionCall);
                     }
                 }
             }
         }

-        // If no explicit decision was made using the makeDecision function, try to parse it from the text
+        // If no explicit decision was made using the reportFinalOutcome function, try to parse it from the text
         if (!decision) {
+            console.warn(`No decision function call made during the stream session`);
             try {
                 // Try to parse a JSON decision from the text
                 const jsonMatch = finalResponse.match(/\{[\s\S]*"decision"[\s\S]*\}/);
@@ -579,11 +595,11 @@ After you have implemented the workitem using function calls, use the makeDecisi
             }
         }

-        console.debug(`- Completed gemini stream processing. Final response: ${decision}`);
+        console.debug(`- Completed gemini stream processing. Final response: ${decision?.decision} - ${decision?.reason}`);

         return {
             text: finalResponse,
-            decision: decision,
+            decision: decision ?? {decision: "skip", reason: "No decision received/parsed"},
             modelResponses: modelResponses,
             filesWritten: filesWritten,
             filesDeleted: filesDeleted
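Note (sketch only, not part of this commit): with the fallback above a decision object is always returned. Judging from the return statement in this hunk, the result is assumed to have roughly this shape:

    // Illustrative interface; the actual type is declared elsewhere in the service.
    interface ProcessModelStreamResult {
        text: string;
        decision: {decision: 'create' | 'update' | 'delete' | 'skip'; reason: string};
        modelResponses: string[];
        filesWritten: string[];
        filesDeleted: string[];
    }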
@@ -93,15 +93,12 @@ ${gitPatch}
 You are tasked with creating a pull request description for changes to test specifications.

 The following is a summary of the changes made:

 ${description}
 ${gitPatch && gitPatch !== "No changes detected." ? gitPatchSection : ''}

 Create a clear, professional pull request description that:
-1. Explains that this PR was automatically generated
-2. Summarizes the changes (added, updated, deleted, and failed workitems)
-3. If code changes are provided, analyze them and include a summary of the key changes
-4. Uses markdown formatting for better readability
-5. Keeps the description concise but informative
+Keeps the description concise but informative

 The pull request description should be ready to use without further editing.
 `;
@@ -4,180 +4,180 @@
 import * as fs from 'fs';
 import * as path from 'path';
 import * as os from 'os';
-import { simpleGit, SimpleGit } from 'simple-git';
-import { Project, RepoCredentials } from '../types';
+import {simpleGit, SimpleGit} from 'simple-git';
+import {Project, RepoCredentials} from '../types';

 export class RepositoryService {
     private baseDir: string;

     constructor(baseDir?: string) {
         this.baseDir = baseDir || path.join(os.tmpdir(), 'shared-repo-service');

         // Ensure base directory exists
         if (!fs.existsSync(this.baseDir)) {
-            fs.mkdirSync(this.baseDir, { recursive: true });
+            fs.mkdirSync(this.baseDir, {recursive: true});
         }
     }

     /**
      * Clone the main repository containing prompts
      * @param repoUrl URL of the repository
      * @param credentials Optional credentials for private repositories
      * @returns Path to the cloned repository
      */
     async cloneMainRepository(repoUrl: string, credentials?: RepoCredentials): Promise<string> {
         const repoDir = path.join(this.baseDir, 'main-repo');

         // Clean up existing directory if it exists
         if (fs.existsSync(repoDir)) {
-            fs.rmSync(repoDir, { recursive: true, force: true });
+            fs.rmSync(repoDir, {recursive: true, force: true});
         }

-        fs.mkdirSync(repoDir, { recursive: true });
+        fs.mkdirSync(repoDir, {recursive: true});

         // Configure git with credentials if provided
         const git = this.configureGit(repoDir, credentials);

         // Clone the repository
         await git.clone(repoUrl, repoDir);

         return repoDir;
     }

     /**
      * Clone a project repository
      * @param project Project information
      * @param credentials Optional credentials for private repositories
      * @returns Path to the cloned repository
      */
     async cloneProjectRepository(project: Project, credentials?: RepoCredentials): Promise<string> {
         if (!project.repoUrl) {
             throw new Error(`Repository URL not found for project ${project.name}`);
         }

         const projectRepoDir = path.join(this.baseDir, `project-${project.name}`);

         // Clean up existing directory if it exists
         if (fs.existsSync(projectRepoDir)) {
-            fs.rmSync(projectRepoDir, { recursive: true, force: true });
+            fs.rmSync(projectRepoDir, {recursive: true, force: true});
         }

-        fs.mkdirSync(projectRepoDir, { recursive: true });
+        fs.mkdirSync(projectRepoDir, {recursive: true});

         // Configure git with credentials if provided
         const git = this.configureGit(projectRepoDir, credentials);

         // Clone the repository
         await git.clone(project.repoUrl, projectRepoDir);

         // Checkout the target branch if specified
         if (project.targetBranch) {
             await this.checkoutBranch(projectRepoDir, project.targetBranch);
         }

         return projectRepoDir;
     }

     /**
      * Create a new branch in a repository
      * @param repoDir Path to the repository
      * @param branchName Name of the branch to create
      */
     async createBranch(repoDir: string, branchName: string): Promise<void> {
         const git = simpleGit(repoDir);
         await git.checkoutLocalBranch(branchName);
     }

     /**
      * Commit changes to a repository
      * @param repoDir Path to the repository
      * @param message Commit message
      */
     async commitChanges(repoDir: string, message: string): Promise<void> {
         const git = simpleGit(repoDir);
         await git.add('.');
         await git.commit(message);
     }

     /**
      * Push changes to a repository
      * @param repoDir Path to the repository
      * @param branchName Name of the branch to push
      * @param credentials Optional credentials for private repositories
      */
     async pushChanges(repoDir: string, branchName: string, credentials?: RepoCredentials): Promise<void> {
         const git = this.configureGit(repoDir, credentials);
         await git.push('origin', branchName, ['--set-upstream']);
     }

     /**
      * Generate a git patch of the changes in a repository
      * @param repoDir Path to the repository
      * @returns Git patch as a string
      */
     async generateGitPatch(repoDir: string): Promise<string> {
         const git = simpleGit(repoDir);

         // Check if there are any changes
         const status = await git.status();
         if (status.files.length === 0) {
             return "No changes detected.";
         }

         // Generate a diff of all changes (staged and unstaged)
         const diff = await git.diff(['--staged', '--no-color']);

         // Only get untracked diff if there are untracked files
         let untrackedDiff = '';
         if (status.not_added && status.not_added.length > 0) {
             untrackedDiff = await git.diff(['--no-index', '/dev/null', ...status.not_added.map(file => path.join(repoDir, file))]).catch(() => '');
         }

         // Combine the diffs
         let patch = diff;
         if (untrackedDiff) {
             patch += '\n\n' + untrackedDiff;
         }

         return patch || "No changes detected.";
     }

     /**
      * Checkout an existing branch in a repository
      * @param repoDir Path to the repository
      * @param branchName Name of the branch to checkout
      * @throws Error if checkout fails
      */
     async checkoutBranch(repoDir: string, branchName: string): Promise<void> {
         const git = simpleGit(repoDir);
         try {
             await git.checkout(branchName);
         } catch (error) {
             throw new Error(`Failed to checkout branch ${branchName}: ${error instanceof Error ? error.message : String(error)}`);
         }
     }

     /**
      * Configure git with credentials
      * @param repoDir Path to the repository
      * @param credentials Credentials for authentication
      * @returns Configured SimpleGit instance
      */
     private configureGit(repoDir: string, credentials?: RepoCredentials): SimpleGit {
         const git = simpleGit(repoDir);

         if (credentials) {
             if (credentials.type === 'username-password' && credentials.username && credentials.password) {
                 // For HTTPS URLs with username/password
                 const credentialHelper = `!f() { echo "username=${credentials.username}"; echo "password=${credentials.password}"; }; f`;
                 git.addConfig('credential.helper', credentialHelper, false, 'global');
             } else if (credentials.type === 'token' && credentials.token) {
                 // For HTTPS URLs with token
                 const credentialHelper = `!f() { echo "password=${credentials.token}"; }; f`;
                 git.addConfig('credential.helper', credentialHelper, false, 'global');
             }
         }

         return git;
     }
 }
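Note (usage sketch, not part of this commit, assuming the signatures shown above):

    // Clone a repository, create a branch, commit and push; the URL and branch name are placeholders.
    async function exampleUsage(): Promise<void> {
        const repositoryService = new RepositoryService();
        const repoDir = await repositoryService.cloneMainRepository('https://example.com/org/repo.git');
        await repositoryService.createBranch(repoDir, 'feature/example');
        await repositoryService.commitChanges(repoDir, 'WIP');
        await repositoryService.pushChanges(repoDir, 'feature/example');
    }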
@@ -32,7 +32,7 @@ export class ProjectTestSpecsService {
         // Read project guidelines
         const projectGuidelines = await this.projectService.readProjectGuidelines(project.path);

-        const result = await this.processTestSpec(project, projectRepoPath, projectGuidelines);
+        const result = await this.generateTestSpecs(project, projectRepoPath, projectGuidelines);

         // Generate git patch if any files were written
         let gitPatch: string | undefined = undefined;
@@ -64,11 +64,10 @@ export class ProjectTestSpecsService {
      * Process a test spec using Gemini
      * @param project Project containing the test spec
      * @param projectRepoPath Path to the project repository
-     * @param testSpec Test spec to process
      * @param projectGuidelines Project guidelines
      * @returns Result of the processing
      */
-    private async processTestSpec(
+    private async generateTestSpecs(
         project: Project,
         projectRepoPath: string,
         projectGuidelines: string
@@ -78,7 +77,7 @@ export class ProjectTestSpecsService {
         const relevantFiles = await this.collectRelevantFiles(project, projectRepoPath);

         // Let Gemini generate the implementation
-        const result = await this.generateImplementations(
+        const result = await this.generateAllTestSpecs(
             projectRepoPath,
             projectGuidelines,
             relevantFiles
@@ -133,6 +132,7 @@ export class ProjectTestSpecsService {
         guidelinePaths
             .map(g => g.trim())
             .forEach(fileName => {
+                console.debug("Collected guideline file: " + fileName);
                 const filePath = path.join(projectRepoPath, fileName);
                 if (fs.existsSync(filePath)) {
                     relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
@@ -151,11 +151,10 @@ export class ProjectTestSpecsService {
      * Generate implementation using Gemini API
      * @param projectRepoPath Path to the project repository
      * @param guidelines Project guidelines
-     * @param testSpec Test spec to implement
      * @param relevantFiles Additional relevant files to include in the prompt
      * @returns Object containing the generated text, parsed decision, and files written/deleted
      */
-    private async generateImplementations(
+    private async generateAllTestSpecs(
         projectRepoPath: string,
         guidelines: string,
         relevantFiles: Record<string, string> = {}
@@ -5,5 +5,5 @@ Nitro backend server in quarkus
 - [x] Repo host: https://gitea.fteamdev.valuya.be/
 - [x] Repo url: https://gitea.fteamdev.valuya.be/cghislai/nitro-back.git
 - [x] Target branch: main
-- [ ] AI guidelines:
+- [x] AI guidelines: nitro-it/src/test/resources/workitems/AI_DEFINITION.md
 - [x] Jira component: nitro
@@ -0,0 +1,23 @@
+## Document archiving
+
+Nitro admins want to be able to archive documents in every status. Once the document reaches the
+status ARCHIVED, it cannot be COMPLETED afterwards.
+
+When a document is archived using the dedicated endpoint, its status should be set ARCHIVED directly.
+
+When a document in the status TO_EXPORT is archived, and that an export was in progress at that time,
+the export should complete, but the document status must not change and the document must not be
+set problematic once the export completes.
+
+Only users that are superAdmins may archive documents.
+
+- [ ] Jira: NITRO-0003
+- [ ] Implementation:
+- [ ] Pull Request:
+- [x] Active
+
+### Log
+
+2025-06-08T09:58:06.287Z - Workitem has been implemented.
+
+- Created nitro-it/src/test/resources/workitems/2025-06-08-document-archvigin.feature
@@ -11,5 +11,10 @@ The nitro-back backend should have a /test endpoint implemented returning the js

 ### Log

+2025-06-08T09:58:26.902Z - Workitem has been updated.
+- Created nitro-it/src/test/resources/workitems/test_workitem.feature
+PR: https://gitea.fteamdev.valuya.be/cghislai/nitro-back/pulls/1
+
+
 2025-06-08T07:36:00.901Z - Workitem has been implemented.
 - Created nitro-it/src/test/resources/workitems/test_workitem.feature
@@ -1,7 +1,4 @@
-## Test spec implementation
+This is your guideline for the implementation of the feature file:

-- Iterate over cucumber feature definitions in the `nitro-it/src/test/resources/workitems/` folder.
-- For each of them, a corresponding test implementation should be created if it does not exist.
-- Test implementations should be created in the `nitro-it/src/test/java/be/fiscalteam/nitro/bdd` folder, following the
-  same structure as the feature definition files. One test file per feature definition.
-
+- Iterate over cucumber ".feature" definition files in the `nitro-it/src/test/resources/workitems/` directory.
+- For each of them create all required files to implement the feature.
@@ -5,5 +5,5 @@ Nitro backend server in quarkus
 - [x] Repo host: https://gitea.fteamdev.valuya.be/
 - [x] Repo url: https://gitea.fteamdev.valuya.be/cghislai/nitro-back.git
 - [x] Target branch: main
-- [ ] AI guidelines:
+- [x] AI guidelines: nitro-it/src/test/resources/workitems/AI_IMPLEMENTATION.md
 - [x] Jira component: nitro