
Export Violations From JFrog Xray to CSV

The #DevSecOps trend is growing fast, and security is no longer the concern of your security team alone. More and more organizations want to integrate their security team into all phases of development and operations. To get there, you sometimes need to export data from JFrog Xray (in our case, to CSV) so you can ingest it into your existing logging/monitoring system.

What is Xray?

In a nutshell, JFrog Xray works with JFrog Artifactory to perform deep analysis of binary components at any stage of the application lifecycle. It provides full transparency, which leads to more trust in your software.

By scanning binary components and their metadata, and recursively going through dependencies at every level (think of the layers in any Docker container), JFrog Xray provides great visibility into issues lurking in components anywhere in your organization.

Xray API

One of the best parts is that JFrog Xray is also fully automated through a rich REST API, and that is what we will use to build this exporter. Feel free to clone/fork the code below and use it, but remember that for a real system you might need to add pagination and a watchdog (see the pagination sketch after the first script).

Code

Fetching all the violations per watch:

//
// Fetch Xray data into CSV and later into any DB/logging/monitoring tool you wish.
//
// @author: Ido Green | @greenido
// @date: March 2020
//
// @see:
// https://greenido.wordpress.com/2019/12/04/how-to-build-an-integration-with-jfrog-xray/
// https://greenido.wordpress.com/2020/02/12/continuous-software-updates/
//
// https://csvkit.readthedocs.io/en/latest/
//
// init project
const fs = require("fs");
const request = require("request");
require("dotenv").config();
// TODO: change these values in your .env file to match your credentials
const BASE_URL = process.env.BASE_URL;
const USERNAME = process.env.XRAY_USER;
const PASSWORD = process.env.XRAY_PASS;
// One timestamp for the whole run, so the CSV file and the details-urls file share it
const curTime = new Date().getTime();

//
// Fetch violations from Xray
// TODO: change the 'watch_name' to a watch that is defined in your Xray instance
//
function fetchViolations() {
  // Create the base64 auth string
  let auth = 'Basic ' + Buffer.from(USERNAME + ':' + PASSWORD).toString('base64');

  // Using the REST API on: /api/v1/violations
  // with an example payload for a specific watch_name.
  const options = {
    method: "POST",
    url: "http://" + BASE_URL + "/api/v1/violations",
    headers: {
      "Content-Type": "application/json",
      Authorization: auth
    },
    body: JSON.stringify({
      "filters": { "watch_name": "step3-create-docker-image-product" },
      "pagination": { "order_by": "updated", "limit": 25, "offset": 1 }
    })
  };

  request(options, function(error, response) {
    if (error) {
      console.log("😳 ERROR with fetching violations: ");
      console.log(error);
      return;
    }
    console.log("Status code: " + response.statusCode);
    if (response.statusCode == 200) {
      let resJson = JSON.parse(response.body);
      console.log("🏵 Total Violations: " + resJson.total_violations);
      console.log("Violations: " + response.body);
      // Now let's convert the JSON to CSV and save it
      exportJson2Csv(resJson);
    }
  });
}
//
// Export the JSON to CSV
//
function exportJson2Csv(jsonInput) {
  const json = jsonInput.violations;
  const fields = [
    "description",
    "severity",
    "type",
    "infected_component",
    "created",
    "watch_name",
    "issue_id",
    "violation_details_url",
    "impacted_artifacts"
  ];

  const replacer = function(key, value) {
    if (value === null || value === undefined) {
      // Handle null cases in the Xray response
      return "";
    }
    if ((key.indexOf("infected_component") > -1 ||
         key.indexOf("impacted_artifacts") > -1) &&
        Array.isArray(value)) {
      // We have an array to save in one cell
      return value.join(" -- ");
    }
    if (key.indexOf("violation_details_url") > -1) {
      // Let's save it in a separate file so we can run over it later to add more details.
      saveDetailedUrl(value);
      return value;
    }
    // This will make sure we aren't breaking our CSV
    value = value.replace(/,/g, ';');
    value = value.replace(/\"/g, "'");
    return value;
  };

  // Building the CSV output
  let csv = json.map(function(row) {
    return fields
      .map(function(fieldName) {
        let cellValue = replacer(fieldName, row[fieldName]);
        return JSON.stringify(cellValue);
      })
      .join(",");
  });

  // Add the header row
  csv.unshift(fields.join(","));
  csv = csv.join("\r\n");

  //
  // Save it to a CSV file
  //
  fs.writeFile("./xray-violations-" + curTime + ".csv", csv, err => {
    if (err) {
      console.log("😳 Error writing CSV file", err);
    } else {
      console.log("🥎 Successfully wrote the CSV file");
    }
  });
}
//
// Saving the details url on a file we can use in the next script (with another process).
//
function saveDetailedUrl(detailUrl) {
  fs.appendFile("./xray-violations-details-urls-" + curTime + ".txt", detailUrl + "\n", err => {
    if (err) {
      console.log("😳 Error writing detailUrl: ", err);
    }
    // else { console.log("🥎 Successfully wrote detailUrl: " + detailUrl); }
  });
}
//
// Let's start the work
//
fetchViolations();
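
The payload above asks for a single page of 25 results (offset 1). For a real system, as noted earlier, you will want to page through everything the watch returns. Below is a minimal pagination sketch, not part of the original script: getViolationsPage() and fetchAllViolations() are hypothetical helpers that reuse the same request, and the sketch assumes 'offset' behaves as a 1-based page index (as the payload above suggests; check the Xray REST API docs for your version).

//
// A minimal pagination sketch (not part of the original script).
// Assumption: 'offset' is a 1-based page index, as the payload above suggests.
//
const request = require("request");
require("dotenv").config();

const BASE_URL = process.env.BASE_URL;
const AUTH = "Basic " + Buffer.from(process.env.XRAY_USER + ":" + process.env.XRAY_PASS).toString("base64");
const PAGE_SIZE = 25;

// Hypothetical helper: fetch one page of violations for a given watch
function getViolationsPage(watchName, offset) {
  const options = {
    method: "POST",
    url: "http://" + BASE_URL + "/api/v1/violations",
    headers: { "Content-Type": "application/json", Authorization: AUTH },
    body: JSON.stringify({
      filters: { watch_name: watchName },
      pagination: { order_by: "updated", limit: PAGE_SIZE, offset: offset }
    })
  };
  return new Promise((resolve, reject) => {
    request(options, (error, response) => {
      if (error || response.statusCode !== 200) {
        return reject(error || new Error("HTTP " + response.statusCode));
      }
      resolve(JSON.parse(response.body));
    });
  });
}

// Hypothetical helper: keep requesting pages until a partial (or empty) page comes back
async function fetchAllViolations(watchName) {
  let all = [];
  let offset = 1;
  while (true) {
    const page = await getViolationsPage(watchName, offset);
    const violations = page.violations || [];
    all = all.concat(violations);
    if (violations.length < PAGE_SIZE) {
      break; // last page
    }
    offset++;
  }
  return all;
}

You could then pass { violations: allViolations } into exportJson2Csv() and leave the rest of the script unchanged. A watchdog (for example, a cap on the number of pages or a timeout around the whole run) is left out here for brevity.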

Fetching more details per violation:

//
// Fetch more details per violation
// If you wish to merge it with the violations CSV, the issue_id is the primary key
//
// @author: Ido Green | @greenido
// @date: Feb 2020
//
// @see:
// https://greenido.wordpress.com/2019/12/04/how-to-build-an-integration-with-jfrog-xray/
// https://greenido.wordpress.com/2020/02/12/continuous-software-updates/
//
// init project
const fs = require("fs");
const rp = require("request-promise");
require("dotenv").config();
// TODO: change these values in your .env file to match your credentials
const BASE_URL = process.env.BASE_URL;
const USERNAME = process.env.XRAY_USER;
const PASSWORD = process.env.XRAY_PASS;
const VIOLATION_URLS = process.env.VIOLATION_URLS;

// Used to get unique file names for a given run
const curTime = new Date().getTime();

// The details we are collecting per violation URL
let outputStr = "Issue Id, Description, Summary, Properties \n";

//
// Read all the URLs and, for each one, fetch the details we want.
//
async function fetchDetailsPerViolation() {
  let auth = 'Basic ' + Buffer.from(USERNAME + ':' + PASSWORD).toString('base64');
  let urls = fs.readFileSync(VIOLATION_URLS).toString().split("\n");
  for (const i in urls) {
    if (!urls[i]) {
      // Skip empty lines (e.g. the trailing newline in the file)
      continue;
    }
    console.log(urls[i]);
    const options = {
      'method': 'GET',
      'url': urls[i],
      headers: {
        "Content-Type": "application/json",
        Authorization: auth
      }
    };
    await rp(options).then(function (response) {
      // An example of the full response can be found here: https://gist.github.com/greenido/c25c7187fc44df0eabe6cf9b3f4792ae
      let retJson = JSON.parse(response);
      // Let's build our output CSV string
      outputStr += '"' + retJson.issue_id + '", "' + retJson.description + '", "' +
        retJson.summary + '", "' + JSON.stringify(retJson.properties) + '" \n';
    })
    .catch(function (error) {
      console.log("😳 Error with rp call: " + error + " -- " + JSON.stringify(options));
    });
  }
}
//
// save to CSV file
//
function saveCSVfile(csvData) {
  let fileName = "./xray-details-violations-" + curTime + ".csv";
  fs.writeFile(fileName, csvData, err => {
    if (err) {
      console.log("😳 Error writing: " + fileName, err);
    } else {
      console.log("🥎 Successfully wrote: " + fileName);
    }
  });
}
//
// Start the work
//
(async () => {
  await fetchDetailsPerViolation();
  saveCSVfile(outputStr);
})();
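
Since both scripts write the violation's issue_id, you can join the two CSV files after the fact. One way (a sketch, not something the scripts do for you) is csvjoin from csvkit, which is already linked in the code comments above. The columns are referenced by position because the two headers name the key differently: issue_id is the 7th column of the violations CSV and the 1st column of the details CSV. The <timestamp> placeholders stand for the run timestamps in the generated file names:

csvjoin -c 7,1 xray-violations-<timestamp>.csv xray-details-violations-<timestamp>.csv > xray-merged.csv

Depending on how strictly your CSV tooling treats the space after each comma in the details file, you may want to drop those spaces when building outputStr.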

Demo

Demo site – see how the API works with a mock object.

Demo code – so you can see, clone, or fork it.

Docker

🐸 You can run it with this Docker Image as well. It’s just one line:

docker run -v /tmp/xray-csv/:/home/node/app/xray-csv  --env-file .env -p 80:8080 -d greenido/jfrog-xray-exporter-csv

All you need to do is make sure that /tmp/xray-csv/ is a valid directory on your machine, as this is where the CSV files will be stored.

You need to fill the .env file with the following fields:

BASE_URL=ip-address:port/xray
XRAY_USER=Your-user
XRAY_PASS=the-password
EXPORTER_PORT=8080

# Query params
MIN_SEVERITY=Low
VIO_LIMIT=2500

Be safe and happy!
