Skip to content

Commit

Permalink
C:/Program Files/Git/v1 API ML Auth
Browse files Browse the repository at this point in the history
Signed-off-by: Salisu Ali <[email protected]>
  • Loading branch information
salisbuk7897 committed Aug 20, 2021
1 parent a90fae8 commit 8c159d4
Show file tree
Hide file tree
Showing 13 changed files with 559 additions and 482 deletions.
28 changes: 26 additions & 2 deletions Documentation/User Documentation/2. Configure and Run App.md
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,13 @@ ZEBRA’s configuration gives users the flexibility to run the app according to
"authSource":"admin",
"grafanaurl":"localhost",
"grafanaport":"3000",
"grafanahttptype": "http"
"grafanahttptype": "http",
"apiml_http_type" : "https",
"apiml_IP" : "localhost",
"apiml_port" : "10010",
"apiml_auth_type" : "bypass",
"apiml_username" : "username",
"apiml_password" : "password"
}
```

Expand Down Expand Up @@ -136,7 +142,25 @@ ZEBRA’s configuration gives users the flexibility to run the app according to

18. **grafanaurl**: IP address or URL to Prometheus/Grafana service. E.g ***grafanaurl : localhost***

19. **grafanaport**: Port used to access Grafana service. E.g ***grafanaport : 3000***
19. **grafanaport**: Port used to access Grafana service. E.g ***grafanaport : 3000***

20. **apiml_http_type**: This refers to the Hypertext Transfer Protocol scheme (http or https) used to reach the Zowe API Mediation Layer. E.g. ***apiml_http_type: “https”***

21. **apiml_IP**: This refers to the IP of the ZOWE API Mediation Layer. E.g ***apiml_IP: “localhost”***

22. **apiml_port**: This refers to the port of the Zowe API Mediation layer. E.g ***apiml_port: “10010”***

23. **apiml_auth_type**: This refers to the authentication type to be used by ZOWE API Mediation Layer. E.g ***apiml_auth_type: “BYPASS”***. It takes one of two values:
- **BYPASS**
This tells ZEBRA to Bypass ZOWE API ML Authentication.

- **ZOWEJWT**
This tells ZEBRA to USE JSON WEB TOKEN for ZOWE API Mediation Layer Authentication.

24. **apiml_username**: This refers to the Zowe API Mediation Layer username used for authentication. It is REQUIRED if apiml_auth_type is set to ZOWEJWT. E.g. ***apiml_username: “user”***

25. **apiml_password**: This refers to the Zowe API Mediation Layer password used for authentication. It is REQUIRED if apiml_auth_type is set to ZOWEJWT. E.g. ***apiml_password: “password”***


## How to Run App
To run Zebra App on a server or Local Machine, a user can choose to run the app using ```npm```, ```nodemon``` or ```pm2```.
Expand Down
Binary file not shown.
2 changes: 1 addition & 1 deletion src/Zebra.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ services:
- gatewayUrl: api/static
serviceRelativeUrl: /static
authentication:
scheme: httpBasicPassTicket
scheme: ZoweJwt
applid: ZOWEAPPL
apiInfo:
- apiId: localhost.zebra.3090
Expand Down
26 changes: 13 additions & 13 deletions src/Zebra_Swagger.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
],
"produces": ["application/json;charset=UTF-8"],
"paths": {
"/v1/{lpar}/rmf3/{report}": {
"/v1/{lpar}/rmf3/{report}/apiml": {
"get": {
"tags": [
"RMF Monitor III"
Expand Down Expand Up @@ -72,7 +72,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmf3/{report}?parm={value}": {
"/v1/{lpar}/rmf3/{report}/apiml?parm={value}": {
"get": {
"tags": [
"RMF Monitor III"
Expand Down Expand Up @@ -131,7 +131,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmf3/{report}?lpar_parms={value}": {
"/v1/{lpar}/rmf3/{report}/apiml?lpar_parms={value}": {
"get": {
"tags": [
"RMF Monitor III"
Expand Down Expand Up @@ -190,7 +190,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmf3/{report}?parm={value1}&lpar_parms={value2}": {
"/v1/{lpar}/rmf3/{report}/apiml?parm={value1}&lpar_parms={value2}": {
"get": {
"tags": [
"RMF Monitor III"
Expand Down Expand Up @@ -258,7 +258,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmf3/{report}?job={value}": {
"/v1/{lpar}/rmf3/{report}/apiml?job={value}": {
"get": {
"tags": [
"RMF Monitor III"
Expand Down Expand Up @@ -317,7 +317,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmf3/{report}?resource={resource}": {
"/v1/{lpar}/rmf3/{report}/apiml?resource={resource}": {
"get": {
"tags": [
"RMF Monitor III"
Expand Down Expand Up @@ -485,7 +485,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down Expand Up @@ -553,7 +553,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}&SvcCls={value3}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}&SvcCls={value3}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down Expand Up @@ -630,7 +630,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}&SvcCls={value3}&Time={value4}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}&SvcCls={value3}&Time={value4}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down Expand Up @@ -716,7 +716,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}&SvcCls={value3}&duration={value4}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}&SvcCls={value3}&duration={value4}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down Expand Up @@ -802,7 +802,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}&Wlkd={value3}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}&Wlkd={value3}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down Expand Up @@ -879,7 +879,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}&Wlkd={value3}&Time={value4}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}&Wlkd={value3}&Time={value4}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down Expand Up @@ -965,7 +965,7 @@
"deprecated": false
}
},
"/v1/{lpar}/rmfpp/{report}?start={value1}&end={value2}&Wlkd={value3}&duration={value4}": {
"/v1/{lpar}/rmfpp/{report}/apiml?start={value1}&end={value2}&Wlkd={value3}&duration={value4}": {
"get": {
"tags": [
"RMF Monitor I"
Expand Down
140 changes: 87 additions & 53 deletions src/app_server/parser/RMFPPparser.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,62 +11,81 @@ module.exports.bodyParserforRmfCPUPP = function (data, fn) {//Function to parse
try { //try to process xml
parser.parseString(data, function (err, result) {
var finalJSON = []; // Collection for storing JSON of Parsed CPU XML
var key1 = result['ddsml']['postprocessor']; // keys1 represent all postprocessor report sections
//For loop to create dictionary/JSON
for (k in key1) { //looping through all postprocessor sections, one at a time: k is an integer startinf from zero and increase with each iteration
var cpuMachineType = key1[k]['segment'][0]['part'][0]['var-list'][0]['var'][0]['value'][0];
var cpuModelType = key1[k]['segment'][0]['part'][0]['var-list'][0]['var'][1]['value'][0];
var CPUSectiontable = key1[k]['segment'][0]['part'][0]['table'][0]["row"] // Rows in CPU Section of XML
var CPUsectioncolumnheader = key1[k]['segment'][0]['part'][0]['table'][0]['column-headers'][0]['col']; // Column headers in CPU Section of XML
var partitionDataName = key1[k]['segment'][1]['part'][2]['name'][0]; // Partion data section name
var partitionDataTable = key1[k]['segment'][1]['part'][2]['table'][0]['row']; // Table of the partition data section
var partitionDataColumnHead = key1[k]['segment'][1]['part'][2]['table'][0]['column-headers'][0]['col']; //Columnhead of the partition data section
var resourceName = key1[k]['resource'][0]['resname'][0];
var resourceType = key1[k]['resource'][0]['restype'][0];
var PDCH = [] //partition data columnheader collection
var FPDR = [] //final partition data report collection
for (i in partitionDataColumnHead) { //looping through partitionDataColumnHead
PDCH[i] = partitionDataColumnHead[i]['_']; //populating the partition data columnheader collection
};
var postprocessors = result['ddsml']['postprocessor']; // keys1 represent all postprocessor report sections
for (a in postprocessors) {
var singleCPUReport = {};
var segments = postprocessors[a]['segment'];
var resourceName = postprocessors[a]['resource'][0]['resname'][0];
var allSegmentCollection = {};
for (b in segments) {
var parts = segments[b]['part'];
var segmentName = segments[b]['name'][0];
var segmentCollection = {};
var partCollection = {};
for (c in parts) {
var partName = parts[c]['name'];
var varlist = parts[c]['var-list'];
var table = parts[c]['table'];
var fieldCollection = {};

for (i in partitionDataTable) { //looping through partitionDatatable
var PDTB = {} //Partition data Table body collection
for (j in PDCH) { //looping through partition Data ColumnHead
PDTB[PDCH[j]] = partitionDataTable[i].col[j]; //creating a key value pairs for each lpar in the partion data table
}
FPDR.push(PDTB); //populating final partition data report collection
};
if (varlist) {
var variables = varlist[0]['var'];
for (d in variables) {
fieldCollection[variables[d]['name'][0]] = variables[d]['value'][0];
}
}

CPUColumnhead = [] // CPU columnheader collection
for (i in CPUsectioncolumnheader) { //looping through CPUsectioncolumnheader
CPUColumnhead[i] = CPUsectioncolumnheader[i]['_']; //populating the CPU columnheader collection
};
if (table) {
var tableColumnHeader = table[0]['column-headers'][0]['col'];
var tableBody = table[0]['row'];
var columnheadCollection = [];
var finalTableReport = [];
for (i in tableColumnHeader) {
columnheadCollection[i] = tableColumnHeader[i]['_'];
}

finalCPUTable = [];//final CPU report collection
for (i in CPUSectiontable) { //looping through CPUsectiontable
CPUtable = {} //CPU table data collection
for (j in CPUColumnhead) {//looping through CPU ColumnHead
CPUtable[CPUColumnhead[j]] = CPUSectiontable[i].col[j]; //creating a key value pairs for each CPU in the CPU table
}
finalCPUTable.push(CPUtable); //populating final CPU report collection
};
if (tableBody !== undefined) {
for ( i in tableBody) {
var CPUtable = {}
for (j in columnheadCollection) {
CPUtable[columnheadCollection[j]] = tableBody[i]['col'][j];
}
finalTableReport.push(CPUtable);
}
}

var finalCPUReport = { Machine: cpuMachineType, Model: cpuModelType, Table: finalCPUTable };
if (!varlist) {
fieldCollection = finalTableReport;
} else {
fieldCollection["Table"] = finalTableReport;
}
}

parsedPostprocessor = {}; //collection for individual parsed postprocessor, one at a time
parsedPostprocessor['Report'] = key1[k]['metric'][0]["description"][0]; // Report key value pair
parsedPostprocessor['Timestamp'] = key1[k]['time-data'][0]['display-start'][0]['_']; // Timestamp key value pair
parsedPostprocessor[resourceType] = resourceName;
parsedPostprocessor['CPU'] = finalCPUReport; // CPU key value pair
parsedPostprocessor[partitionDataName] = FPDR;
//parsedPostprocessor['partitionDataName'] = partitionDataName; // partitionDataName key value pair
//parsedPostprocessor['partitionDataBody'] = FPDR; // partionDataBody key value pair
if (partName) {
partCollection[partName] = fieldCollection;
} else {
partCollection['Info'] = fieldCollection;
}
if (segmentCollection) {
allSegmentCollection[segmentName] = partCollection;
} else {
allSegmentCollection['Segment'] = partCollection;
}
}
}
singleCPUReport['Report'] = postprocessors[a]['metric'][0]["description"][0];
singleCPUReport["System"] = resourceName;
singleCPUReport['Timestamp'] = postprocessors[a]['time-data'][0]['display-start'][0]['_'];
singleCPUReport = {
...singleCPUReport,
...allSegmentCollection
};

finalJSON.push(parsedPostprocessor); //populating FinalJSON with parsedPostprocessor
finalJSON.push(singleCPUReport);
}
fn(finalJSON); //function returns parsed postprocessor
fn(finalJSON);
});
} catch (err) {// if parsing XML didn't went smooth
} catch (err) {// if parsing XML didn't went smooth
fn({msg: 'Err', error: err, data: data});// return the error
}
}
Expand All @@ -84,11 +103,12 @@ module.exports.bodyParserforRmfWLMPP = function (data, fn) {//Function to parse
for (a in postprocessors) { // loop through all postprocessor sections
var singleWLMPPReport = {}; // JSON Collection for a single postprocession section of the XML
var segments = postprocessors[a]['segment']; // represent segments each postprocessor section
var resourceName = postprocessors[a]['resource'][0]['resname'][0];
AllsegmentCollection = []; // A JSON collection for all segments
for (b in segments) { // loop through segments in the XML
var parts = segments[b]['part']; // represent segment part value in the XML
var message = segments[b]['message']; // represent segment message value in the XML
var segmentName = segments[b]['name']; // represent segment name in the XML
var segmentName = segments[b]['name'][0]; // represent segment name in the XML
segmentCollection = {}; //A JSON for single XML segment
if (message) { // if segment contains mesaage atrributes in the XML
var messageDescription = message[0]['description'][0]; // represent message description
Expand Down Expand Up @@ -131,7 +151,12 @@ module.exports.bodyParserforRmfWLMPP = function (data, fn) {//Function to parse
finaltableReport.push(WLMtable);
};
}
fieldCollection["Table"] = finaltableReport;

if (!varlist) {
fieldCollection = finaltableReport;
} else {
fieldCollection["Table"] = finaltableReport;
}
}
// Add to part
if (partName) { // if part name atrribute is present in the XML
Expand All @@ -140,17 +165,26 @@ module.exports.bodyParserforRmfWLMPP = function (data, fn) {//Function to parse
partCollection['Info'] = fieldCollection; //populate part collections with "info" as key and finaltableReport as value
}
if (segmentName) { // if segment name atrribute is present in the XML
segmentCollection[segmentName] = partCollection; //populate segment collections with segmentname as key and partCollection as value
segmentCollection = {
'Name': segmentName,
...segmentCollection,
...partCollection
}
} else { // if segment name atrribute is not present in the XML
segmentCollection['Segment'] = partCollection; //populate segment collections with "Info" as key and partCollection as value
segmentCollection = {
'Name': 'N/A',
...segmentCollection,
...partCollection
}
}

}
AllsegmentCollection.push(segmentCollection) //Push segment collection into all segments
}
singleWLMPPReport['Report'] = postprocessors[a]['metric'][0]["description"][0]; //singleWLMPPReport report key value pair
singleWLMPPReport['Sysplex'] = resourceName;
singleWLMPPReport['Timestamp'] = postprocessors[a]['time-data'][0]['display-start'][0]['_']; //singleWLMPPReport Timestamp key value pair
singleWLMPPReport['ReportSegments'] = AllsegmentCollection; //singleWLMPPReport reportSegments key value pairs
singleWLMPPReport['Classes'] = AllsegmentCollection; //singleWLMPPReport reportSegments key value pairs

//console.log(ppReport);
allWLMPPJSON.push(singleWLMPPReport); //push single workload postprocessor JSON into All workload post processor JSON
Expand Down
32 changes: 32 additions & 0 deletions src/app_server/routes/mainRouter.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,40 @@ let grafanabaseurl = Zconfig.grafanaurl;
let grafanabaseport = Zconfig.grafanaport;
let grafanahttptype = Zconfig.grafanahttptype;
const axios = require('axios');
const { send } = require('process');
const grafanaServer = `${grafanahttptype}://${grafanabaseurl}:${grafanabaseport}`



// Zebra API ML cookie checker: report the API ML JWT stored in the browser,
// or the literal string "No Cookie" when none has been set yet.
router.get('/apimlcookie', function(req, res, next){
    const apimlToken = req.cookies.apimlAuthenticationToken;
    // `== undefined` deliberately matches both undefined and null.
    if (apimlToken == undefined) {
        res.send("No Cookie");
        return;
    }
    res.send(apimlToken);
})

// Log in to the Zowe API Mediation Layer gateway with the username/password
// from the request body and return the JWT extracted from the
// `apimlAuthenticationToken` Set-Cookie response header.
// Sends the bare token string on success, or the string "error" on any failure
// (no Set-Cookie header, network error, bad credentials, ...).
router.post('/apimllogin', function(req, res, next){
    // Build the gateway URL from Zconfig instead of hard-coding
    // https://localhost:10010, so the documented apiml_http_type / apiml_IP /
    // apiml_port settings are actually honored.
    const apimlLoginUrl = `${Zconfig.apiml_http_type}://${Zconfig.apiml_IP}:${Zconfig.apiml_port}/api/v1/gateway/auth/login`;
    axios.post(apimlLoginUrl, {
        "username": req.body.username,
        "password": req.body.password
    })
    .then(function (response) {
        const setCookie = response.headers["set-cookie"];
        if (setCookie) {
            // Set-Cookie looks like "apimlAuthenticationToken=<jwt>; Path=/; ...";
            // the token is the value between the first "=" and the first ";".
            var res_head = setCookie[0].split("=");
            var token_split = res_head[1].split(";");
            var token = token_split[0];
            res.send(token);
        } else {
            res.send("error");
        }
    })
    .catch(function (error) {
        // NOTE(review): the error is swallowed by design (client only needs
        // success/failure); consider logging `error.message` server-side.
        res.send("error");
    });
})

// Zebra UI routers

// Checks if user login session is available in browser
Expand Down
Loading

0 comments on commit 8c159d4

Please sign in to comment.