The plugin I'm working on is for Kibana 7.16.3.
The server side code currently looks like the following:
import { schema } from '@kbn/config-schema';
import { logger } from 'elastic-apm-node';
import { IRouter } from '../../../../src/core/server';
import { ComplexityAndChurnFactory } from "../resources/cxchquery";
import { validateBody, linearmap } from "../resources/utility";
let elasticSearchHost = ""
export function defineHosts(host: string) {
elasticSearchHost = host
}
export function defineRoutes(router: IRouter) {
router.get(
{
path: '/api/complexity_and_churn/agg',
validate: {
params: schema.object({}),
body: schema.object({
Size: schema.number({}),
Index: schema.string({}),
StartDate: schema.string({}),
EndDate: schema.string({}),
FileTypeFilters: schema.arrayOf(schema.string({}), {})
}, { })
},
},
async (context, request, response) => {
console.log(`Recv Req: ${JSON.stringify(request.body)}`);
let reqBody = request.body;
validateBody(reqBody);
let query = ComplexityAndChurnFactory(reqBody.Index, reqBody.StartDate, reqBody.EndDate, reqBody.FileTypeFilters, 10000);
let resultSize = reqBody.Size;
let minScore = 0;
let maxScore = 50;
// If the user needs to scan over 10 million files after date range and filtering, there is likely a bigger problem.
const MAX_QUERIES = 1000;
let topXScores: Array<Object> = []
/**Strategy for getting top scores in one pass of the dataset
* Composite aggreggation returns subset of data => update global min/max complexity/churn based on this data.
* Based on global min/max complexity/churn, calculate the score of the composite aggregation subset.
* Based on global min/max complexity/churn, update the score of the previously saved top scores.
* Join the current aggregation subset and previously saved top scores into one dataset.
* Remove all but the top x scores.
* Repeat with previous composite aggregation after key until data is exhausted.
*/
let minComplexity = Number.POSITIVE_INFINITY;
let maxComplexity = Number.NEGATIVE_INFINITY;
let minChurn = Number.POSITIVE_INFINITY;
let maxChurn = Number.NEGATIVE_INFINITY;
let i = 0;
for (i=0; i<MAX_QUERIES; i++)
{
let resp = await context.core.elasticsearch.client.asCurrentUser.search(
query
);
logger.info(`query responded with: ${resp}`);
// Check for completion
let buckets = resp.body.aggregations.buckets.buckets;
if (buckets.length == 0 || !query?.after_key) {
break;
}
// Set up next query if buckets were returned.
query.after_key = resp.body.aggregations.buckets.after_key;
minComplexity = buckets.reduce((p: Object, v: Object)=>p.complexity.value < v.complexity.value? p.complexity.value : v.complexity.value, minComplexity);
maxComplexity = buckets.reduce((p: Object, v: Object)=>p.complexity.value > v.complexity.value? p.complexity.value : v.complexity.value, maxComplexity);
minChurn = buckets.reduce((p: Object, v: Object)=>p.churn.value < v.churn.value? p.churn.value : v.churn.value, minChurn);
maxChurn = buckets.reduce((p: Object, v: Object)=>p.churn.value > v.churn.value? p.churn.value : v.churn.value, maxChurn);
// Recalculate scores for topXScores based on updated min and max complexity and churn.
topXScores.forEach(element => {
let complexityScore = linearmap(element.complexity.value, minComplexity, maxComplexity, minScore, maxScore);
let churnScore = linearmap(element.churn.value, minChurn, maxChurn, minScore, maxScore);
element.score = complexityScore + churnScore;
});
// For new data, calculate score and add to topXScores array.
buckets.forEach(element => {
let complexityScore = linearmap(element.complexity.value, minComplexity, maxComplexity, minScore, maxScore);
let churnScore = linearmap(element.churn.value, minChurn, maxChurn, minScore, maxScore);
element.score = complexityScore + churnScore;
topXScores.push(element);
});
// Sort the topXScores by score.
topXScores = topXScores.sort((a, b) => a.score - b.score);
// Remove all but the top x scores from the array.
let numberBucketsToRemove = Math.max(topXScores.length - resultSize, 0);
topXScores.splice(0, numberBucketsToRemove);
}
if (i == MAX_QUERIES) {
throw new Error(`[ERROR] Exceeded maximum allowed queries (${MAX_QUERIES}) for composite aggregations please reach out to an administrator to get this amount changed or limit your query's date range and filters.`)
}
return response.ok({
body: {
buckets: topXScores
}
});
}
);
}
When I make a request to the endpoint like in the following:
curl --request GET 'http://localhost:5601/api/complexity_and_churn/agg' --header 'kbn-xsrf: anything' --header 'content-type: application/json; charset=utf-8' --header 'Authorization: Basic <Auth>' -d '{
"Size": 100,
"Index": "mainindexfour",
"StartDate": "2010/10/10",
"EndDate": "2022/10/10",
"FileTypeFilters": ["xml"]
}'
I get the response:
{
"statusCode": 400,
"error": "Bad Request",
"message": "[request body.Size]: expected value of type [number] but got [undefined]"
}
If I remove the validation on the body and print out JSON.stringify(request.body), I see that it is an empty object, regardless of what data I send.
Is my server side code or the request I'm sending incorrect?