feat: add in retry logic
All checks were successful
Dependabot Auto-Merge / dependabot (pull_request) Has been skipped
Dependabot Auto-Merge / devopsbot (pull_request) Has been skipped
Dependabot Auto-Merge / rennovatebot (pull_request) Has been skipped
Unit Tests / unittest (pull_request) Successful in 40s
COMMIT LINT / commitlint (pull_request) Successful in 1m49s

dist/index.js (vendored): 88 changes
@@ -32845,7 +32845,7 @@ const debug = (core.getInput("debug") || "false").toLowerCase() === "true";
 
 const repoFull = process.env.GITHUB_REPOSITORY;
 const [owner, repo] = repoFull.split("/");
-const segment_id = ["docs", owner, repo].join(".");
+const segment_id = ["docs", owner].join(".");
 
 const serverUrl = (
   process.env.GITHUB_SERVER_URL ||
@@ -32868,9 +32868,66 @@ if (apiToken) {
   headers.Apitoken = apiToken;
 }
 
-for (const file of markdownFiles) {
-  const content = fs.readFileSync(file, "utf8");
+async function post(requestPayload, retries=0) {
+  try{
+    const response = await fetch(apiUrl, {
+      method: "POST",
+      headers,
+      body: JSON.stringify(requestPayload),
+    });
+
+    const responseText = await response.text();
+    if (!response.ok) {
+      core.setFailed(
+        `Agent API request failed (${response.status}): ${responseText}`,
+      );
+    } else {
+      core.info(`Agent response: ${responseText}`);
+    }
+  } catch(e){
+    if(retries < 5){
+      return post(requestPayload, retries-1)
+    }
+    core.setFailed(`Error sending task to agent: ${error}`)
+  }
+}
+
+for (const file of markdownFiles) {
+  const content = fs.readFileSync(file, "utf8").trim();
+  const lines = content.split(/\r?\n/);
+  const h1Line = lines.find((line) => /^#\s+/.test(line)) || "";
+
+  const chunks = [];
+  let current = [];
+
+  for (const line of lines) {
+    if (/^##\s+/.test(line)) {
+      if (current.length) {
+        chunks.push(current.join("\n").trim());
+      }
+      current = [line];
+    } else {
+      current.push(line);
+    }
+  }
+
+  if (current.length) {
+    chunks.push(current.join("\n").trim());
+  }
+
+  const normalizedChunks =
+    chunks.length > 0
+      ? chunks.map((chunk) => {
+          let chunkLines = chunk.split(/\r?\n/);
+          if (h1Line && chunkLines[0] === h1Line) {
+            chunkLines = chunkLines.slice(1);
+          }
+          const body = chunkLines.join("\n").trim();
+          return [h1Line, body].filter(Boolean).join("\n");
+        })
+      : [content];
+
+  normalizedChunks.forEach((chunk, index) => {
   const requestPayload = {
     type: "input",
     route,
@@ -32881,9 +32938,11 @@ for (const file of markdownFiles) {
       method,
       inputs: {
         segment_id,
-        document_id: file,
-        embed_text: content,
-        store_text: content,
+        document_id: [repo, file.replace(".", ""), `part${index + 1}`].join(
+          ".",
+        ),
+        embed_text: chunk,
+        store_text: chunk,
       },
     },
   };
@@ -32896,21 +32955,8 @@ for (const file of markdownFiles) {
     core.info(`Request payload: ${JSON.stringify(requestPayload)}`);
   }
 
-  fetch(apiUrl, {
-    method: "POST",
-    headers,
-    body: JSON.stringify(requestPayload),
-  })
-    .then((response) => {
-      if (!response.ok) {
-        core.setFailed(
-          `Agent API request failed (${response.status}): ${responseText}`,
-        );
-      } else {
-        core.info(`Agent response: ${response.text()}`);
-      }
-    })
-    .catch((error) => core.setFailed(`Error sending task to agent: ${error}`));
+    post(requestPayload);
+  });
 }
 
 module.exports = __webpack_exports__;

index.js: 84 changes
@@ -13,7 +13,7 @@ const debug = (core.getInput("debug") || "false").toLowerCase() === "true";
 
 const repoFull = process.env.GITHUB_REPOSITORY;
 const [owner, repo] = repoFull.split("/");
-const segment_id = ["docs", owner, repo].join(".");
+const segment_id = ["docs", owner].join(".");
 
 const serverUrl = (
   process.env.GITHUB_SERVER_URL ||
@@ -36,9 +36,66 @@ if (apiToken) {
   headers.Apitoken = apiToken;
 }
 
+async function post(requestPayload, retries=0) {
+  try{
+    const response = await fetch(apiUrl, {
+      method: "POST",
+      headers,
+      body: JSON.stringify(requestPayload),
+    });
+
+    const responseText = await response.text();
+    if (!response.ok) {
+      core.setFailed(
+        `Agent API request failed (${response.status}): ${responseText}`,
+      );
+    } else {
+      core.info(`Agent response: ${responseText}`);
+    }
+  } catch(e){
+    if(retries < 5){
+      return post(requestPayload, retries-1)
+    }
+    core.setFailed(`Error sending task to agent: ${error}`)
+  }
+}
+
 for (const file of markdownFiles) {
   const content = fs.readFileSync(file, "utf8").trim();
+  const lines = content.split(/\r?\n/);
+  const h1Line = lines.find((line) => /^#\s+/.test(line)) || "";
+
+  const chunks = [];
+  let current = [];
+
+  for (const line of lines) {
+    if (/^##\s+/.test(line)) {
+      if (current.length) {
+        chunks.push(current.join("\n").trim());
+      }
+      current = [line];
+    } else {
+      current.push(line);
+    }
+  }
+
+  if (current.length) {
+    chunks.push(current.join("\n").trim());
+  }
+
+  const normalizedChunks =
+    chunks.length > 0
+      ? chunks.map((chunk) => {
+          let chunkLines = chunk.split(/\r?\n/);
+          if (h1Line && chunkLines[0] === h1Line) {
+            chunkLines = chunkLines.slice(1);
+          }
+          const body = chunkLines.join("\n").trim();
+          return [h1Line, body].filter(Boolean).join("\n");
+        })
+      : [content];
+
+  normalizedChunks.forEach((chunk, index) => {
   const requestPayload = {
     type: "input",
     route,
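
The post() helper added above retries whenever fetch throws, but as committed the catch block binds the exception as e while the failure message references an undefined error variable, and each retry passes retries - 1, so the retries < 5 guard never becomes false. A minimal sketch of the same idea with the caught error reported and the attempt counter counting up toward the limit; the name postWithRetry and the backoff delay are illustrative assumptions, while apiUrl, headers, and core come from the surrounding action code:

async function postWithRetry(requestPayload, attempt = 0) {
  try {
    const response = await fetch(apiUrl, {
      method: "POST",
      headers,
      body: JSON.stringify(requestPayload),
    });
    const responseText = await response.text();
    if (!response.ok) {
      core.setFailed(`Agent API request failed (${response.status}): ${responseText}`);
    } else {
      core.info(`Agent response: ${responseText}`);
    }
  } catch (error) {
    if (attempt < 5) {
      // Illustrative linear backoff before retrying (not part of the commit).
      await new Promise((resolve) => setTimeout(resolve, 1000 * (attempt + 1)));
      return postWithRetry(requestPayload, attempt + 1);
    }
    core.setFailed(`Error sending task to agent: ${error}`);
  }
}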
@@ -49,9 +106,11 @@ for (const file of markdownFiles) {
       method,
       inputs: {
         segment_id,
-        document_id: file,
-        embed_text: content,
-        store_text: content,
+        document_id: [repo, file.replace(".", ""), `part${index + 1}`].join(
+          ".",
+        ),
+        embed_text: chunk,
+        store_text: chunk,
       },
     },
   };
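
With the chunking above, each ## section of a markdown file is sent as its own document, and the part suffix keeps the IDs distinct per chunk. A small illustration of the resulting IDs using made-up repo and file names; note that String.prototype.replace with a string pattern removes only the first "." in the path:

// Illustration only; repo and file names are made up.
const repo = "docs-agent";
const file = "guides/setup.md";
const ids = [0, 1].map((index) =>
  [repo, file.replace(".", ""), `part${index + 1}`].join("."),
);
// ids: ["docs-agent.guides/setupmd.part1", "docs-agent.guides/setupmd.part2"]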
@@ -64,19 +123,6 @@ for (const file of markdownFiles) {
     core.info(`Request payload: ${JSON.stringify(requestPayload)}`);
   }
 
-  fetch(apiUrl, {
-    method: "POST",
-    headers,
-    body: JSON.stringify(requestPayload),
-  })
-    .then((response) => {
-      if (!response.ok) {
-        core.setFailed(
-          `Agent API request failed (${response.status}): ${responseText}`,
-        );
-      } else {
-        core.info(`Agent response: ${response.text()}`);
-      }
-    })
-    .catch((error) => core.setFailed(`Error sending task to agent: ${error}`));
+    post(requestPayload);
+  });
 }