[Remainder of the deleted minified js-yaml bundle: the document loader (plain, single-quoted, double-quoted and block scalar readers; flow-collection, block-sequence, block-mapping and directive parsing; exports loadAll, load, safeLoadAll, safeLoad), the Mark error-snippet helper, the Schema compiler, the failsafe/json/core/default_safe/default_full schemas, and the YAML type modules (binary, bool, float, int, js/function, js/regexp, js/undefined, map, merge, null, omap, pairs, seq, set, str, timestamp). The minified source was garbled in extraction ("<" operators and comparisons stripped) and is elided here.]
diff --git a/stackalytics/dashboard/static/js/md5.js b/stackalytics/dashboard/static/js/md5.js
deleted file mode 100644
index 46d2aab7d..000000000
--- a/stackalytics/dashboard/static/js/md5.js
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
- * Digest Algorithm, as defined in RFC 1321.
- * Version 2.1 Copyright (C) Paul Johnston 1999 - 2002.
- * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
- * Distributed under the BSD License
- * See http://pajhome.org.uk/crypt/md5 for more info.
- */
-
-/*
- * Configurable variables. You may need to tweak these to be compatible with
- * the server-side, but the defaults work in most cases.
- */
-var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
-var b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
-var chrsz = 8; /* bits per input character. 8 - ASCII; 16 - Unicode */
-
-/*
- * These are the functions you'll usually want to call.
- * They take string arguments and return either hex or base-64 encoded strings.
- */
-function hex_md5(s){ return binl2hex(core_md5(str2binl(s), s.length * chrsz));}
-function b64_md5(s){ return binl2b64(core_md5(str2binl(s), s.length * chrsz));}
-function str_md5(s){ return binl2str(core_md5(str2binl(s), s.length * chrsz));}
-function hex_hmac_md5(key, data) { return binl2hex(core_hmac_md5(key, data)); }
-function b64_hmac_md5(key, data) { return binl2b64(core_hmac_md5(key, data)); }
-function str_hmac_md5(key, data) { return binl2str(core_hmac_md5(key, data)); }
-
-/*
- * Perform a simple self-test to see if the VM is working
- */
-function md5_vm_test()
-{
- return hex_md5("abc") == "900150983cd24fb0d6963f7d28e17f72";
-}
-
-/*
- * Calculate the MD5 of an array of little-endian words, and a bit length
- */
-function core_md5(x, len)
-{
- /* append padding */
- x[len >> 5] |= 0x80 << ((len) % 32);
- x[(((len + 64) >>> 9) << 4) + 14] = len;
-
- var a = 1732584193;
- var b = -271733879;
- var c = -1732584194;
- var d = 271733878;
-
- for(var i = 0; i < x.length; i += 16)
- {
- var olda = a;
- var oldb = b;
- var oldc = c;
- var oldd = d;
-
- a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
- d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
- c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
- b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
- a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
- d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
- c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
- b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
- a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
- d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
- c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
- b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
- a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
- d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
- c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
- b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329);
-
- a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
- d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
- c = md5_gg(c, d, a, b, x[i+11], 14, 643717713);
- b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
- a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
- d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
- c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
- b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
- a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
- d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
- c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
- b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
- a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
- d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
- c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
- b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);
-
- a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
- d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
- c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
- b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
- a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
- d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
- c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
- b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
- a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
- d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
- c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
- b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
- a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
- d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
- c = md5_hh(c, d, a, b, x[i+15], 16, 530742520);
- b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);
-
- a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
- d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
- c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
- b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
- a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
- d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
- c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
- b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
- a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
- d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
- c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
- b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
- a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
- d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
- c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
- b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);
-
- a = safe_add(a, olda);
- b = safe_add(b, oldb);
- c = safe_add(c, oldc);
- d = safe_add(d, oldd);
- }
- return Array(a, b, c, d);
-
-}
-
-/*
- * These functions implement the four basic operations the algorithm uses.
- */
-function md5_cmn(q, a, b, x, s, t)
-{
- return safe_add(bit_rol(safe_add(safe_add(a, q), safe_add(x, t)), s),b);
-}
-function md5_ff(a, b, c, d, x, s, t)
-{
- return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
-}
-function md5_gg(a, b, c, d, x, s, t)
-{
- return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
-}
-function md5_hh(a, b, c, d, x, s, t)
-{
- return md5_cmn(b ^ c ^ d, a, b, x, s, t);
-}
-function md5_ii(a, b, c, d, x, s, t)
-{
- return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
-}
-
-/*
- * Calculate the HMAC-MD5 of a key and some data
- */
-function core_hmac_md5(key, data)
-{
- var bkey = str2binl(key);
- if(bkey.length > 16) bkey = core_md5(bkey, key.length * chrsz);
-
- var ipad = Array(16), opad = Array(16);
- for(var i = 0; i < 16; i++)
- {
- ipad[i] = bkey[i] ^ 0x36363636;
- opad[i] = bkey[i] ^ 0x5C5C5C5C;
- }
-
- var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
- return core_md5(opad.concat(hash), 512 + 128);
-}
-
-/*
- * Add integers, wrapping at 2^32. This uses 16-bit operations internally
- * to work around bugs in some JS interpreters.
- */
-function safe_add(x, y)
-{
- var lsw = (x & 0xFFFF) + (y & 0xFFFF);
- var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
- return (msw << 16) | (lsw & 0xFFFF);
-}
-
-/*
- * Bitwise rotate a 32-bit number to the left.
- */
-function bit_rol(num, cnt)
-{
- return (num << cnt) | (num >>> (32 - cnt));
-}
-
-/*
- * Convert a string to an array of little-endian words
- * If chrsz is 8 (ASCII), characters >255 have their high byte silently ignored.
- */
-function str2binl(str)
-{
- var bin = Array();
- var mask = (1 << chrsz) - 1;
- for(var i = 0; i < str.length * chrsz; i += chrsz)
- bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (i%32);
- return bin;
-}
-
-/*
- * Convert an array of little-endian words to a string
- */
-function binl2str(bin)
-{
- var str = "";
- var mask = (1 << chrsz) - 1;
- for(var i = 0; i < bin.length * 32; i += chrsz)
- str += String.fromCharCode((bin[i>>5] >>> (i % 32)) & mask);
- return str;
-}
-
-/*
- * Convert an array of little-endian words to a hex string.
- */
-function binl2hex(binarray)
-{
- var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
- var str = "";
- for(var i = 0; i < binarray.length * 4; i++)
- {
- str += hex_tab.charAt((binarray[i>>2] >> ((i%4)*8+4)) & 0xF) +
- hex_tab.charAt((binarray[i>>2] >> ((i%4)*8 )) & 0xF);
- }
- return str;
-}
-
-/*
- * Convert an array of little-endian words to a base-64 string
- */
-function binl2b64(binarray)
-{
- var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
- var str = "";
- for(var i = 0; i < binarray.length * 4; i += 3)
- {
- var triplet = (((binarray[i >> 2] >> 8 * ( i %4)) & 0xFF) << 16)
- | (((binarray[i+1 >> 2] >> 8 * ((i+1)%4)) & 0xFF) << 8 )
- | ((binarray[i+2 >> 2] >> 8 * ((i+2)%4)) & 0xFF);
- for(var j = 0; j < 4; j++)
- {
- if(i * 8 + j * 6 > binarray.length * 32) str += b64pad;
- else str += tab.charAt((triplet >> 6*(3-j)) & 0x3F);
- }
- }
- return str;
-}
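
The file removed above exposed plain globals rather than a module. A short usage sketch of those helpers; the key and message inputs are assumptions, while the "abc" digest is the vector that md5_vm_test() itself checks:

// Hedged usage sketch for the deleted md5.js globals.
// hex_md5/b64_md5/str_md5 hash a string; the *_hmac_md5 variants take (key, data).
var digest = hex_md5('abc');
// "900150983cd24fb0d6963f7d28e17f72" -- the RFC 1321 test vector
// that md5_vm_test() compares against.

// HMAC-MD5 over assumed inputs; the output encoding follows the helper name.
var mac = hex_hmac_md5('secret-key', 'message');

// Base-64 output is padded with b64pad ("" by default, "=" for strict
// RFC compliance); hex output casing is controlled by hexcase.
var b64 = b64_md5('abc');
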
diff --git a/stackalytics/dashboard/static/js/select2.min.js b/stackalytics/dashboard/static/js/select2.min.js
deleted file mode 100644
index 1537edca0..000000000
--- a/stackalytics/dashboard/static/js/select2.min.js
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
-Copyright 2012 Igor Vaynberg
-
-Version: 3.4.1 Timestamp: Thu Jun 27 18:02:10 PDT 2013
-
-This software is licensed under the Apache License, Version 2.0 (the "Apache License") or the GNU
-General Public License version 2 (the "GPL License"). You may choose either license to govern your
-use of this software only upon the condition that you accept all of the terms of either the Apache
-License or the GPL License.
-
-You may obtain a copy of the Apache License and the GPL License at:
-
-http://www.apache.org/licenses/LICENSE-2.0
-http://www.gnu.org/licenses/gpl-2.0.html
-
-Unless required by applicable law or agreed to in writing, software distributed under the Apache License
-or the GPL License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-either express or implied. See the Apache License and the GPL License for the specific language governing
-permissions and limitations under the Apache License and the GPL License.
-*/
[Minified body of the deleted Select2 3.4.1 plugin: the jQuery each2 shim and utility helpers, the ajax/local-data/tags query adapters, the AbstractSelect2 base class (container and dropdown construction, positioning, search, highlight and selection handling), and the single-select subclass. The minified source was garbled in extraction (HTML templates, escape entities and "<" operators stripped), and the hunk is truncated at the end of this section.]
-},val:function(){var a,c=!1,d=null,e=this,f=this.data();if(0===arguments.length)return this.opts.element.val();if(a=arguments[0],arguments.length>1&&(c=arguments[1]),this.select)this.select.val(a).find(":selected").each2(function(a,b){return d=e.optionToData(b),!1}),this.updateSelection(d),this.setPlaceholder(),c&&this.triggerChange({added:d,removed:f});else{if(!a&&0!==a)return this.clear(c),b;if(this.opts.initSelection===b)throw Error("cannot call val() if initSelection() is not defined");this.opts.element.val(a),this.opts.initSelection(this.opts.element,function(a){e.opts.element.val(a?e.id(a):""),e.updateSelection(a),e.setPlaceholder(),c&&e.triggerChange({added:a,removed:f})})}},clearSearch:function(){this.search.val(""),this.focusser.val("")},data:function(a,c){var d;return 0===arguments.length?(d=this.selection.data("select2-data"),d==b&&(d=null),d):(a&&""!==a?(d=this.data(),this.opts.element.val(a?this.id(a):""),this.updateSelection(a),c&&this.triggerChange({added:a,removed:d})):this.clear(c),b)}}),f=L(d,{createContainer:function(){var b=a(document.createElement("div")).attr({"class":"select2-container select2-container-multi"}).html(["",""].join(""));return b},prepareOpts:function(){var b=this.parent.prepareOpts.apply(this,arguments),c=this;return"select"===b.element.get(0).tagName.toLowerCase()?b.initSelection=function(a,b){var d=[];a.find(":selected").each2(function(a,b){d.push(c.optionToData(b))}),b(d)}:"data"in b&&(b.initSelection=b.initSelection||function(c,d){var e=p(c.val(),b.separator),f=[];b.query({matcher:function(c,d,g){var h=a.grep(e,function(a){return o(a,b.id(g))}).length;return h&&f.push(g),h},callback:a.isFunction(d)?function(){for(var a=[],c=0;e.length>c;c++)for(var g=e[c],h=0;f.length>h;h++){var i=f[h];if(o(g,b.id(i))){a.push(i),f.splice(h,1);break}}d(a)}:a.noop})}),b},selectChoice:function(a){var b=this.container.find(".select2-search-choice-focus");b.length&&a&&a[0]==b[0]||(b.length&&this.opts.element.trigger("choice-deselected",b),b.removeClass("select2-search-choice-focus"),a&&a.length&&(this.close(),a.addClass("select2-search-choice-focus"),this.opts.element.trigger("choice-selected",a)))},initContainer:function(){var e,d=".select2-choices";this.searchContainer=this.container.find(".select2-search-field"),this.selection=e=this.container.find(d);var f=this;this.selection.on("mousedown",".select2-search-choice",function(){f.search[0].focus(),f.selectChoice(a(this))}),this.search.attr("id","s2id_autogen"+g()),a("label[for='"+this.opts.element.attr("id")+"']").attr("for",this.search.attr("id")),this.search.on("input paste",this.bind(function(){this.isInterfaceEnabled()&&(this.opened()||this.open())})),this.search.attr("tabindex",this.elementTabIndex),this.keydowns=0,this.search.on("keydown",this.bind(function(a){if(this.isInterfaceEnabled()){++this.keydowns;var d=e.find(".select2-search-choice-focus"),f=d.prev(".select2-search-choice:not(.select2-locked)"),g=d.next(".select2-search-choice:not(.select2-locked)"),h=x(this.search);if(d.length&&(a.which==c.LEFT||a.which==c.RIGHT||a.which==c.BACKSPACE||a.which==c.DELETE||a.which==c.ENTER)){var i=d;return a.which==c.LEFT&&f.length?i=f:a.which==c.RIGHT?i=g.length?g:null:a.which===c.BACKSPACE?(this.unselect(d.first()),this.search.width(10),i=f.length?f:g):a.which==c.DELETE?(this.unselect(d.first()),this.search.width(10),i=g.length?g:null):a.which==c.ENTER&&(i=null),this.selectChoice(i),y(a),i&&i.length||this.open(),b}if((a.which===c.BACKSPACE&&1==this.keydowns||a.which==c.LEFT)&&0==h.offset&&!h.length)return 
this.selectChoice(e.find(".select2-search-choice:not(.select2-locked)").last()),y(a),b;if(this.selectChoice(null),this.opened())switch(a.which){case c.UP:case c.DOWN:return this.moveHighlight(a.which===c.UP?-1:1),y(a),b;case c.ENTER:return this.selectHighlighted(),y(a),b;case c.TAB:return this.selectHighlighted({noFocus:!0}),this.close(),b;case c.ESC:return this.cancel(a),y(a),b}if(a.which!==c.TAB&&!c.isControl(a)&&!c.isFunctionKey(a)&&a.which!==c.BACKSPACE&&a.which!==c.ESC){if(a.which===c.ENTER){if(this.opts.openOnEnter===!1)return;if(a.altKey||a.ctrlKey||a.shiftKey||a.metaKey)return}this.open(),(a.which===c.PAGE_UP||a.which===c.PAGE_DOWN)&&y(a),a.which===c.ENTER&&y(a)}}})),this.search.on("keyup",this.bind(function(){this.keydowns=0,this.resizeSearch()})),this.search.on("blur",this.bind(function(b){this.container.removeClass("select2-container-active"),this.search.removeClass("select2-focused"),this.selectChoice(null),this.opened()||this.clearSearch(),b.stopImmediatePropagation(),this.opts.element.trigger(a.Event("select2-blur"))})),this.container.on("click",d,this.bind(function(b){this.isInterfaceEnabled()&&(a(b.target).closest(".select2-search-choice").length>0||(this.selectChoice(null),this.clearPlaceholder(),this.container.hasClass("select2-container-active")||this.opts.element.trigger(a.Event("select2-focus")),this.open(),this.focusSearch(),b.preventDefault()))})),this.container.on("focus",d,this.bind(function(){this.isInterfaceEnabled()&&(this.container.hasClass("select2-container-active")||this.opts.element.trigger(a.Event("select2-focus")),this.container.addClass("select2-container-active"),this.dropdown.addClass("select2-drop-active"),this.clearPlaceholder())})),this.initContainerWidth(),this.opts.element.addClass("select2-offscreen"),this.clearSearch()},enableInterface:function(){this.parent.enableInterface.apply(this,arguments)&&this.search.prop("disabled",!this.isInterfaceEnabled())},initSelection:function(){if(""===this.opts.element.val()&&""===this.opts.element.text()&&(this.updateSelection([]),this.close(),this.clearSearch()),this.select||""!==this.opts.element.val()){var c=this;this.opts.initSelection.call(null,this.opts.element,function(a){a!==b&&null!==a&&(c.updateSelection(a),c.close(),c.clearSearch())})}},clearSearch:function(){var a=this.getPlaceholder(),c=this.getMaxSearchWidth();a!==b&&0===this.getVal().length&&this.search.hasClass("select2-focused")===!1?(this.search.val(a).addClass("select2-default"),this.search.width(c>0?c:this.container.css("width"))):this.search.val("").width(10)},clearPlaceholder:function(){this.search.hasClass("select2-default")&&this.search.val("").removeClass("select2-default")},opening:function(){this.clearPlaceholder(),this.resizeSearch(),this.parent.opening.apply(this,arguments),this.focusSearch(),this.updateResults(!0),this.search.focus(),this.opts.element.trigger(a.Event("select2-open"))},close:function(){this.opened()&&this.parent.close.apply(this,arguments)},focus:function(){this.close(),this.search.focus()},isFocused:function(){return this.search.hasClass("select2-focused")},updateSelection:function(b){var c=[],d=[],e=this;a(b).each(function(){0>m(e.id(this),c)&&(c.push(e.id(this)),d.push(this))}),b=d,this.selection.find(".select2-search-choice").remove(),a(b).each(function(){e.addSelectedChoice(this)}),e.postprocessResults()},tokenize:function(){var 
a=this.search.val();a=this.opts.tokenizer.call(this,a,this.data(),this.bind(this.onSelect),this.opts),null!=a&&a!=b&&(this.search.val(a),a.length>0&&this.open())},onSelect:function(a,b){this.triggerSelect(a)&&(this.addSelectedChoice(a),this.opts.element.trigger({type:"selected",val:this.id(a),choice:a}),(this.select||!this.opts.closeOnSelect)&&this.postprocessResults(),this.opts.closeOnSelect?(this.close(),this.search.width(10)):this.countSelectableResults()>0?(this.search.width(10),this.resizeSearch(),this.getMaximumSelectionSize()>0&&this.val().length>=this.getMaximumSelectionSize()&&this.updateResults(!0),this.positionDropdown()):(this.close(),this.search.width(10)),this.triggerChange({added:a}),b&&b.noFocus||this.focusSearch())},cancel:function(){this.close(),this.focusSearch()},addSelectedChoice:function(c){var j,k,d=!c.locked,e=a("
"),f=a("
"),g=d?e:f,h=this.id(c),i=this.getVal();j=this.opts.formatSelection(c,g.find("div"),this.opts.escapeMarkup),j!=b&&g.find("div").replaceWith(""+j+"
"),k=this.opts.formatSelectionCssClass(c,g.find("div")),k!=b&&g.addClass(k),d&&g.find(".select2-search-choice-close").on("mousedown",y).on("click dblclick",this.bind(function(b){this.isInterfaceEnabled()&&(a(b.target).closest(".select2-search-choice").fadeOut("fast",this.bind(function(){this.unselect(a(b.target)),this.selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus"),this.close(),this.focusSearch()})).dequeue(),y(b))})).on("focus",this.bind(function(){this.isInterfaceEnabled()&&(this.container.addClass("select2-container-active"),this.dropdown.addClass("select2-drop-active"))})),g.data("select2-data",c),g.insertBefore(this.searchContainer),i.push(h),this.setVal(i)},unselect:function(a){var c,d,b=this.getVal();if(a=a.closest(".select2-search-choice"),0===a.length)throw"Invalid argument: "+a+". Must be .select2-search-choice";c=a.data("select2-data"),c&&(d=m(this.id(c),b),d>=0&&(b.splice(d,1),this.setVal(b),this.select&&this.postprocessResults()),a.remove(),this.opts.element.trigger({type:"removed",val:this.id(c),choice:c}),this.triggerChange({removed:c}))},postprocessResults:function(a,b,c){var d=this.getVal(),e=this.results.find(".select2-result"),f=this.results.find(".select2-result-with-children"),g=this;e.each2(function(a,b){var c=g.id(b.data("select2-data"));m(c,d)>=0&&(b.addClass("select2-selected"),b.find(".select2-result-selectable").addClass("select2-selected"))}),f.each2(function(a,b){b.is(".select2-result-selectable")||0!==b.find(".select2-result-selectable:not(.select2-selected)").length||b.addClass("select2-selected")}),-1==this.highlight()&&c!==!1&&g.highlight(0),!this.opts.createSearchChoice&&!e.filter(".select2-result:not(.select2-selected)").length>0&&(!a||a&&!a.more&&0===this.results.find(".select2-no-results").length)&&H(g.opts.formatNoMatches,"formatNoMatches")&&this.results.append(""+g.opts.formatNoMatches(g.search.val())+" ")},getMaxSearchWidth:function(){return this.selection.width()-q(this.search)},resizeSearch:function(){var a,b,c,d,e,f=q(this.search);a=A(this.search)+10,b=this.search.offset().left,c=this.selection.width(),d=this.selection.offset().left,e=c-(b-d)-f,a>e&&(e=c-f),40>e&&(e=c-f),0>=e&&(e=a),this.search.width(e)},getVal:function(){var a;return this.select?(a=this.select.val(),null===a?[]:a):(a=this.opts.element.val(),p(a,this.opts.separator))},setVal:function(b){var c;this.select?this.select.val(b):(c=[],a(b).each(function(){0>m(this,c)&&c.push(this)}),this.opts.element.val(0===c.length?"":c.join(this.opts.separator)))},buildChangeDetails:function(a,b){for(var b=b.slice(0),a=a.slice(0),c=0;b.length>c;c++)for(var d=0;a.length>d;d++)o(this.opts.id(b[c]),this.opts.id(a[d]))&&(b.splice(c,1),c--,a.splice(d,1),d--);return{added:b,removed:a}},val:function(c,d){var e,f=this;if(0===arguments.length)return this.getVal();if(e=this.data(),e.length||(e=[]),!c&&0!==c)return this.opts.element.val(""),this.updateSelection([]),this.clearSearch(),d&&this.triggerChange({added:this.data(),removed:e}),b;if(this.setVal(c),this.select)this.opts.initSelection(this.select,this.bind(this.updateSelection)),d&&this.triggerChange(this.buildChangeDetails(e,this.data()));else{if(this.opts.initSelection===b)throw Error("val() cannot be called if initSelection() is not defined");this.opts.initSelection(this.opts.element,function(b){var c=a.map(b,f.id);f.setVal(c),f.updateSelection(b),f.clearSearch(),d&&f.triggerChange(this.buildChangeDetails(e,this.data()))})}this.clearSearch()},onSortStart:function(){if(this.select)throw Error("Sorting of 
elements is not supported when attached to . Attach to instead.");this.search.width(0),this.searchContainer.hide()},onSortEnd:function(){var b=[],c=this;this.searchContainer.show(),this.searchContainer.appendTo(this.searchContainer.parent()),this.resizeSearch(),this.selection.find(".select2-search-choice").each(function(){b.push(c.opts.id(a(this).data("select2-data")))}),this.setVal(b),this.triggerChange()},data:function(c,d){var f,g,e=this;return 0===arguments.length?this.selection.find(".select2-search-choice").map(function(){return a(this).data("select2-data")}).get():(g=this.data(),c||(c=[]),f=a.map(c,function(a){return e.opts.id(a)}),this.setVal(f),this.updateSelection(c),this.clearSearch(),d&&this.triggerChange(this.buildChangeDetails(g,this.data())),b)}}),a.fn.select2=function(){var d,g,h,i,j,c=Array.prototype.slice.call(arguments,0),k=["val","destroy","opened","open","close","focus","isFocused","container","dropdown","onSortStart","onSortEnd","enable","readonly","positionDropdown","data","search"],l=["val","opened","isFocused","container","data"],n={search:"externalSearch"};return this.each(function(){if(0===c.length||"object"==typeof c[0])d=0===c.length?{}:a.extend({},c[0]),d.element=a(this),"select"===d.element.get(0).tagName.toLowerCase()?j=d.element.prop("multiple"):(j=d.multiple||!1,"tags"in d&&(d.multiple=j=!0)),g=j?new f:new e,g.init(d);else{if("string"!=typeof c[0])throw"Invalid arguments to select2 plugin: "+c;if(0>m(c[0],k))throw"Unknown method: "+c[0];if(i=b,g=a(this).data("select2"),g===b)return;if(h=c[0],"container"===h?i=g.container:"dropdown"===h?i=g.dropdown:(n[h]&&(h=n[h]),i=g[h].apply(g,c.slice(1))),m(c[0],l)>=0)return!1}}),i===b?this:i},a.fn.select2.defaults={width:"copy",loadMorePadding:0,closeOnSelect:!0,openOnEnter:!0,containerCss:{},dropdownCss:{},containerCssClass:"",dropdownCssClass:"",formatResult:function(a,b,c,d){var e=[];return C(a.text,c.term,e,d),e.join("")},formatSelection:function(a,c,d){return a?d(a.text):b},sortResults:function(a){return a},formatResultCssClass:function(){return b},formatSelectionCssClass:function(){return b},formatNoMatches:function(){return"No matches found"},formatInputTooShort:function(a,b){var c=b-a.length;return"Please enter "+c+" more character"+(1==c?"":"s")},formatInputTooLong:function(a,b){var c=a.length-b;return"Please delete "+c+" character"+(1==c?"":"s")},formatSelectionTooBig:function(a){return"You can only select "+a+" item"+(1==a?"":"s")},formatLoadMore:function(){return"Loading more results..."},formatSearching:function(){return"Searching..."},minimumResultsForSearch:0,minimumInputLength:0,maximumInputLength:null,maximumSelectionSize:0,id:function(a){return a.id},matcher:function(a,b){return(""+b).toUpperCase().indexOf((""+a).toUpperCase())>=0},separator:",",tokenSeparators:[],tokenizer:K,escapeMarkup:D,blurOnChange:!1,selectOnBlur:!1,adaptContainerCssClass:function(a){return a},adaptDropdownCssClass:function(){return null}},a.fn.select2.ajaxDefaults={transport:a.ajax,params:{type:"GET",cache:!1,dataType:"json"}},window.Select2={query:{ajax:E,local:F,tags:G},util:{debounce:t,markMatch:C,escapeMarkup:D},"class":{"abstract":d,single:e,multi:f}}}}(jQuery);
\ No newline at end of file
diff --git a/stackalytics/dashboard/static/js/stackalytics-kpi.js b/stackalytics/dashboard/static/js/stackalytics-kpi.js
deleted file mode 100644
index d066d08c5..000000000
--- a/stackalytics/dashboard/static/js/stackalytics-kpi.js
+++ /dev/null
@@ -1,395 +0,0 @@
-/*
- Copyright (c) 2014 Mirantis Inc.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- */
-
-function showError(container, message) {
- container.append($("Error! " + message + " "));
-}
-
-function appendKpiBlock(container_id, kpi_block) {
- var container = container_id;
- if (typeof container_id == "string") {
- container = $("#" + container_id);
- }
- var template = $("#kpi_block_template");
- if (template.length > 0) {
- container.append(template.tmpl(kpi_block));
- } else {
- container.append($("" + JSON.stringify(kpi_block) + " "));
- }
-}
-
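-/*
- Fetches /api/1.0 stats, locates item_id in the returned "stats" array
- and delegates the pass/fail decision to a comparator. The comparator
- receives the matched item plus the metric sum over all items and must
- return {mark: boolean, info: string}. A minimal sketch of a comparator
- (the threshold of 100 is illustrative only):
-
-     function (item, sum) {
-         var mark = item.metric >= 100;
-         return {mark: mark, info: mark ? "goal met" : "goal not met"};
-     }
- */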
-function processStats(container_id, url, query_options, item_id, metric, text_goal, comparator) {
- $.ajax({
- url: makeURI(url, query_options),
- dataType: "jsonp",
- success: function (data) {
- data = data["stats"];
- var position = -1;
- var sum = 0;
-
- for (var i = 0; i < data.length; i++) {
- sum += data[i][metric];
- data[i].index = data[i]["index"];
- if (data[i].id == item_id) {
- position = i;
- }
- }
-
- var result = {
- mark: false,
- text_goal: text_goal
- };
-
- if (position < 0) {
- result.info = "Item " + item_id + " is not found in the stats";
- }
- else {
- var comparison_result = comparator(data[position], sum);
- result.mark = comparison_result.mark;
- result.info = comparison_result.info;
- }
- appendKpiBlock(container_id, result);
- }
- });
-}
-
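-/*
- The goal* helpers below wrap processStats with concrete comparators:
- position in the top list (goalPositionInTop), absolute metric value
- (goalMetric), share of the total (goalPercentageInTopLessThan) and
- review disagreement ratio (goalDisagreementRatioLessThan).
- */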
-function goalPositionInTop(container_id, query_options, item_type, item_id, position, text_goal) {
- $(document).ready(function () {
- processStats(container_id, "/api/1.0/stats/" + item_type, query_options, item_id, "metric", text_goal,
- function (item, sum) {
- var mark = item.index <= position;
- return {
- mark: mark,
- info: mark ? "Achieved position is " + item.index :
- "Position " + item.index + " is worse than the goal position " + position,
- value: item.index
- }
- });
- });
-}
-
-function goalMetric(container_id, query_options, item_type, item_id, target, text_goal) {
- $(document).ready(function () {
- processStats(container_id, "/api/1.0/stats/" + item_type, query_options, item_id, "metric", text_goal,
- function (item, sum) {
- var mark = item.metric >= target;
- return {
- mark: mark,
-                    info: mark ? "Achieved metric " + item.metric :
-                        "Metric " + item.metric + " is worse than the goal of " + target,
-                    value: item.metric
- }
- });
- });
-}
-
-function goalPercentageInTopLessThan(container_id, query_options, item_type, item_id, target_percentage, text_goal) {
- $(document).ready(function () {
- processStats(container_id, "/api/1.0/stats/" + item_type, query_options, item_id, "metric", text_goal,
- function (item, sum) {
- var percentage = item.metric / sum;
- var mark = percentage <= target_percentage;
- var percentage_formatted = Math.round(percentage * 100) + "%";
- var goal_percentage_formatted = Math.round(target_percentage * 100) + "%";
- return {
- mark: mark,
- info: mark ? "Achieved percentage " + percentage_formatted :
- "Value " + percentage_formatted + " is more than the goal " + goal_percentage_formatted,
- value: percentage_formatted
- }
- });
- });
-}
-
-function goalDisagreementRatioLessThan(container_id, query_options, item_id, target_percentage, text_goal) {
- $(document).ready(function () {
- processStats(container_id, "/api/1.0/stats/engineers", query_options, item_id, "disagreement_ratio", text_goal,
- function (item, sum) {
- var percentage = parseFloat(item["disagreement_ratio"]);
- var mark = percentage < target_percentage * 100;
- var goal_percentage_formatted = Math.round(target_percentage * 100) + "%";
- return {
- mark: mark,
- info: mark ? "Achieved percentage " + item["disagreement_ratio"] :
- "Value " + item["disagreement_ratio"] + " is more than the goal " + goal_percentage_formatted,
- value: percentage
- }
- });
- });
-}
-
-function goalCoreEngineerInProject(container_id, user_id, project, text_goal) {
- $(document).ready(function () {
- $.ajax({
- url: makeURI("/api/1.0/users/" + user_id),
- dataType: "jsonp",
- success: function (data) {
- var user = data.user;
- var is_core = false;
- if (user.core) {
- for (var i in user.core) {
- if (user.core[i][0] == project) {
- is_core = true;
- }
- }
- }
- var result = {
- mark: is_core,
- text_goal: text_goal,
- info: user.user_name + " (" + user_id + ") is " + (is_core ? "" : "not ") + "core engineer in " + project
- };
- appendKpiBlock(container_id, result);
- },
- error: function () {
- var result = {
- mark: false,
- text_goal: text_goal,
- info: "Item " + user_id + " is not found in the stats"
- };
- appendKpiBlock(container_id, result);
- }
- });
- });
-}
-
-function loadAndShowUserProfile(container, user_id) {
- $.ajax({
- url: makeURI("/api/1.0/users/" + user_id),
- dataType: "json",
- success: function (data) {
- var user = data["user"];
- container.html(user["user_name"] + " (" + user["user_id"] + ")");
- }
- });
-}
-
-function loadAndShowModuleDetails(container, module_id) {
- $.ajax({
- url: makeURI("/api/1.0/modules/" + module_id),
- dataType: "json",
- success: function (data) {
- var module = data["module"];
- container.html(module["name"] + " (" + module["id"] + ")");
- }
- });
-}
-
-var now = Math.floor(Date.now() / 1000);
-
-var release_pattern = /Release (\S+)/;
-var group_pattern = /Group (\S+)/;
-var company_pattern = /Company (\S+)/;
-var user_pattern = /User (\S+)/;
-
-var in_pattern = /.*?(\s+in\s+(\S+)).*/;
-var during_pattern = /.*?(\s+during\s+(\d+)\s+days).*/;
-var make_pattern = /(make|draft|send|write|implement|file|fix|complete)\s+(\d+)\s+(\S+)/;
-var top_pattern = /top\s+(\d+)\s+by\s+(\S+)/;
-var core_pattern = /(become|stay)\s+core/;
-var not_less_than_pattern = /less\s+than\s+(\d+)%\s+by\s+(\S+)/;
-
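-/*
- The patterns above define the goal-statement grammar parsed by
- parseStatements. Hypothetical statements they accept (all values are
- illustrative only):
-
-     "make 20 reviews in nova during 30 days"  -> make_pattern
-     "top 5 by commits"                        -> top_pattern
-     "become core"                             -> core_pattern
-     "less than 30% by marks"                  -> not_less_than_pattern
-
- The optional "in <module>" and "during <N> days" clauses are extracted
- by in_pattern and during_pattern and stripped before the main match.
- */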
-function makeKpiRequestOptions(release, metric, module, duration) {
- var options = {metric: metric, module: module, project_type: "all"};
- if (duration) {
- options["start_date"] = now - duration * 60 * 60 * 24;
- options["release"] = "all";
- } else {
- options["release"] = release;
- }
- return options;
-}
-
-function runMakeStatement(statement, verb, count, noun, duration, item_type, item_id, module, release, container) {
- var metric = noun;
-
- if (noun == "blueprints") {
- metric = (verb == "draft" || verb == "file") ? "bpd" : "bpc";
- }
- if (noun == "bugs") {
- metric = (verb == "file") ? "filed-bugs" : "resolved-bugs";
- }
- if (noun == "reviews") {
- metric = "marks";
- }
-
- goalMetric(container, makeKpiRequestOptions(release, metric, module, duration),
- item_type, item_id, count, statement);
-}
-
-function runTopStatement(statement, position, noun, duration, item_type, item_id, module, release, container) {
- var metric = noun;
- if (noun == "reviews") {
- metric = "marks";
- }
-
- goalPositionInTop(container, makeKpiRequestOptions(release, metric, module, duration),
- item_type, item_id, position, statement);
-}
-
-function runNotLessThanStatement(statement, percentage, noun, duration, item_type, item_id, module, release, container) {
- var metric = noun;
- if (noun == "reviews") {
- metric = "marks";
- }
-
- goalPercentageInTopLessThan(container,
- makeKpiRequestOptions(release, metric, module, duration),
- item_type, item_id, percentage / 100.0, statement);
-}
-
-function parseStatements(item_type, item_id, module, release, details, container) {
- for (var i in details) {
- var original_statement = details[i];
- var statement = original_statement;
- var local_module = module;
- var duration = null;
-
- var pattern_match = in_pattern.exec(statement);
- if (pattern_match) {
- local_module = pattern_match[2];
- statement = statement.replace(pattern_match[1], "");
- }
- pattern_match = during_pattern.exec(statement);
- if (pattern_match) {
- duration = pattern_match[2];
- statement = statement.replace(pattern_match[1], "");
- }
-
- statement = statement.trim();
-
- pattern_match = make_pattern.exec(statement);
- if (pattern_match) {
- runMakeStatement(original_statement, pattern_match[1], pattern_match[2], pattern_match[3], duration,
- item_type, item_id, local_module, release, container);
- continue;
- }
-
- pattern_match = top_pattern.exec(statement);
- if (pattern_match) {
- runTopStatement(original_statement, pattern_match[1], pattern_match[2], duration,
- item_type, item_id, local_module, release, container);
- continue;
- }
-
- pattern_match = core_pattern.exec(statement);
- if (pattern_match) {
- goalCoreEngineerInProject(container, item_id, local_module, original_statement);
- continue;
- }
-
- pattern_match = not_less_than_pattern.exec(statement);
- if (pattern_match) {
- runNotLessThanStatement(original_statement, pattern_match[1], pattern_match[2], duration,
- item_type, item_id, local_module, release, container);
- continue;
- }
-
- showError(container, "Could not parse statement: '" + statement + "'");
- }
-}
-
-function parseGroup(group, release, details, container) {
- var users = [];
-
- for (var token in details) {
- var pattern_match = user_pattern.exec(token);
- if (pattern_match) {
- var user = pattern_match[1];
- users.push(user);
-
- var body = $("
");
- var user_title_block = $("" + user + " ");
- container.append(user_title_block).append(body);
-
- loadAndShowUserProfile(user_title_block, user);
-
- parseStatements("engineers", user, group, release, details[token], body);
- continue;
- }
-
- pattern_match = company_pattern.exec(token);
- if (pattern_match) {
- var company = pattern_match[1];
-
- body = $("
");
- container.append($("" + company + " ")).append(body);
-
- parseStatements("companies", company, group, release, details[token], body);
- continue;
- }
-
- showError(container, "Could not parse line: '" + details[token] + "'");
- }
-}
-
-function parseRelease(release, details, container) {
- for (var token in details) {
- var pattern_match = group_pattern.exec(token);
- if (pattern_match) {
- var group = pattern_match[1];
-
- var body = $("
");
- var title_block = $("" + group + " ");
- container.append(title_block).append(body);
-
- loadAndShowModuleDetails(title_block, group);
-
- parseGroup(group, release, details[token], body);
- continue;
- }
-
- pattern_match = company_pattern.exec(token);
- if (pattern_match) {
- var company = pattern_match[1];
-
- body = $("
");
- container.append($("" + company + " ")).append(body);
-
- parseStatements("companies", company, "all", release, details[token], body);
- continue;
- }
-
- showError(container, "Could not parse line: '" + token + "'");
- }
-}
-
-function parseKpiScript(parsed_script, container) {
- for (var token in parsed_script) {
- var pattern_match = release_pattern.exec(token);
- if (pattern_match) {
- var release = pattern_match[1];
-
- var body = $("
");
- $(container).append($("" + release + " ")).append(body);
-
- parseRelease(release, parsed_script[token], body);
- continue;
- }
- showError(container, "Could not parse line: '" + token + "'");
- }
-}
-
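-/*
- readKpiScript expects a YAML document keyed by release, then by group
- or company, then by user or company, each mapping to a list of goal
- statements. A hypothetical script (all names are illustrative only):
-
-     Release icehouse:
-       Group nova:
-         User johndoe:
-           - top 10 by commits
-           - become core
-         Company acme:
-           - less than 30% by marks
- */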
-function readKpiScript(kpi_script, container_id) {
- var root_container = $("#" + container_id).empty();
-
- try {
- var parsed_script = jsyaml.safeLoad(kpi_script);
- parseKpiScript(parsed_script, root_container);
- } catch (e) {
- showError(root_container, "Could not parse script: '" + kpi_script + "'");
- }
-}
diff --git a/stackalytics/dashboard/static/js/stackalytics-ui.js b/stackalytics/dashboard/static/js/stackalytics-ui.js
deleted file mode 100644
index adcb8eefb..000000000
--- a/stackalytics/dashboard/static/js/stackalytics-ui.js
+++ /dev/null
@@ -1,429 +0,0 @@
-/*
- Copyright (c) 2013 Mirantis Inc.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- */
-
-String.prototype.trunc =
- function (n) {
- if (this.length <= n) return this;
- return this.substr(0, this.substr(0, n).lastIndexOf(' ')) + "…";
- };
-
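-/*
- Renders the jqplot timeline chart. The data argument is expected to be
- an array of jqplot series, each a list of [date, value] points; the
- last series is drawn against a secondary y-axis and is dropped
- entirely when it contains no positive values.
- */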
-function _createTimeline(data) {
- var plot_attrs = {
- gridPadding: {
- right: 35
- },
- cursor: {
- show: false
- },
- highlighter: {
- show: true,
- sizeAdjust: 6
- },
- axes: {
- xaxis: {
- tickRenderer: $.jqplot.CanvasAxisTickRenderer,
- tickOptions: {
- fontSize: '8pt',
- angle: -90,
- formatString: '%b \'%y'
- },
- renderer: $.jqplot.DateAxisRenderer,
- tickInterval: '1 month'
- },
- yaxis: {
- min: 0,
- label: ''
- }
- },
- series: [
- {
- shadow: false,
- fill: true,
- fillColor: '#4bb2c5',
- fillAlpha: 0.3
- },
- {
- shadow: false,
- fill: true,
- color: '#4bb2c5',
- fillColor: '#4bb2c5'
- }
- ]
- }
-    /* add the secondary line only if it has positive values */
-    var has_2 = false;
-    for (var i = 0; i < data[2].length; i++) {
-        if (data[2][i][1] > 0) {
- has_2 = true;
- break;
- }
- }
- if (has_2) {
- plot_attrs.axes.y2axis = {min: 0, label: ''};
- plot_attrs.series.push({
- shadow: false,
- lineWidth: 1.5,
- showMarker: true,
- markerOptions: { size: 5 },
- yaxis: 'y2axis'
- });
- } else {
- data.pop();
- }
- $.jqplot('timeline', data, plot_attrs);
-}
-
-function renderTimeline(options) {
- $(document).ready(function () {
- $.ajax({
- url: makeURI("/api/1.0/stats/timeline", options),
- dataType: "json",
- success: function (data) {
- _createTimeline(data["timeline"]);
- }
- });
- });
-}
-
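-/*
- Feeds a single stats endpoint into both a DataTable and a pie chart:
- the top entries become slices and the tail is folded into an "others"
- slice. A hypothetical call (the ids are illustrative only):
-
-     renderTableAndChart("/api/1.0/stats/companies", "company_container",
-         "company_table", "company_chart", "company");
- */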
-function renderTableAndChart(url, container_id, table_id, chart_id, link_param, table_column_names) {
-
- $(document).ready(function () {
-
- $.ajax({
- url: makeURI(url),
- dataType: "json",
- success: function (data) {
-
- var tableData = [];
- var chartData = [];
-
- const limit = 10;
- var aggregate = 0;
- var i;
-
- data = data["stats"];
-
- if (data.length == 0) {
- $("#" + container_id).hide();
- return;
- }
-
- for (i = 0; i < data.length; i++) {
- if (i < limit - 1) {
- chartData.push([data[i].name.trunc(36), data[i].metric]);
- } else {
- aggregate += data[i].metric;
- }
-
- if (!data[i].link) {
- if (data[i].id) {
- data[i].link = makeLink(data[i].id, data[i].name, link_param);
- } else {
- data[i].link = data[i].name
- }
- }
-
- if (data[i].core == "master") {
- data[i].link += ' ✻'
- } else if (data[i].core) {
- data[i].link += " ✬ " + data[i].core + " ";
- }
-
- tableData.push(data[i]);
- }
-
- if (i == limit) {
- chartData.push([data[i - 1].name.trunc(36), data[i - 1].metric]);
- } else if (i > limit) {
- chartData.push(["others", aggregate]);
- }
-
- if (!table_column_names) {
- table_column_names = ["index", "link", "metric"];
- }
- var tableColumns = [];
- var sort_by_column = 0;
- for (i = 0; i < table_column_names.length; i++) {
- tableColumns.push({"mData": table_column_names[i]});
- if (table_column_names[i] == "metric") {
- sort_by_column = i;
- }
- }
-
- if (table_id) {
- $("#" + table_id).dataTable({
- "aLengthMenu": [
- [10, 25, 50, -1],
- [10, 25, 50, "All"]
- ],
- "aaSorting": [
- [ sort_by_column, "desc" ]
- ],
- "sPaginationType": "full_numbers",
- "iDisplayLength": 10,
- "aaData": tableData,
- "aoColumns": tableColumns
- });
- }
-
- if (chart_id) {
- var plot = $.jqplot(chart_id, [chartData], {
- seriesDefaults: {
- renderer: jQuery.jqplot.PieRenderer,
- rendererOptions: {
- showDataLabels: true
- }
- },
- legend: { show: true, location: 'e' }
- });
- }
- }
- });
- });
-}
-
-function renderBarChart(chart_id, chart_data) {
- $.jqplot(chart_id, chart_data, {
- seriesDefaults: {
- renderer: $.jqplot.BarRenderer,
- rendererOptions: {
- barMargin: 1
- },
- pointLabels: {show: true}
- },
- axes: {
- xaxis: {
- renderer: $.jqplot.CategoryAxisRenderer,
- label: "Age"
- },
- yaxis: {
- label: "Count",
- labelRenderer: $.jqplot.CanvasAxisLabelRenderer
- }
- }
- });
-}
-
-function renderPunchCard(chart_id, chart_data) {
- $.jqplot(chart_id, chart_data, {
- seriesDefaults:{
- renderer: $.jqplot.BubbleRenderer,
- rendererOptions: {
- varyBubbleColors: false,
- color: '#a09898',
- autoscalePointsFactor: -0.25,
- highlightAlpha: 0.7
- },
- shadow: true,
- shadowAlpha: 0.05
- },
- axesDefaults: {
- tickRenderer: $.jqplot.CanvasAxisTickRenderer
- },
- axes: {
- xaxis: {
- label: 'hour, UTC',
- labelRenderer: $.jqplot.CanvasAxisLabelRenderer,
- tickOptions: {
- formatter: function (format, val) {
- if (val < 0 || val > 23) { return "" }
- return val;
- }
- }
- },
- yaxis: {
- label: 'day of week',
- labelRenderer: $.jqplot.CanvasAxisLabelRenderer,
- tickOptions: {
- formatter: function (format, val) {
- if (val < 0 || val > 6) { return "" }
- var labels = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"].reverse();
- return labels[val];
- }
- }
- }
- }
- });
-}
-
-function extendWithGravatar(record, image_size) {
- var gravatar = "stackalytics";
- if (record.author_email) {
- gravatar = record.author_email;
- } else if (record.emails && record.emails.length > 0) {
- gravatar = record.emails[0];
- } else if (record.user_id) {
- gravatar = record.user_id;
- }
- record.gravatar = $.gravatarImageURI(gravatar, {
- "image": "wavatar",
- "rating": "g",
- "size": image_size? image_size: 64
- });
-}
-
-function extendWithTweet(record) {
- var tweet = null;
-
- if (record.record_type == "commit") {
- tweet = "«" + record.subject + "» is committed by " + record.author_name + " in " + record.module;
- } else if (record.record_type == "mark") {
- if (record.type == "Workflow" && record.value == 1) {
- tweet = record.author_name + " approved «" + record.parent_subject + "» in " + record.module + ":P";
- } else if (record.type == "Self-Workflow" && record.value == 1) {
- tweet = record.author_name + " self-approved patch in " + record.module;
- } else if (record.type == "Workflow" && record.value == -1) {
- tweet = record.author_name + " work in progress on patch in " + record.module;
- } else if (record.type == "Abandon" || record.type == "Self-Abandon") {
- tweet = record.author_name + " abandoned patch in " + record.module;
- } else {
- var smile = [";(", ":(", "", ":)", ":D"][record.value + 2];
- tweet = "Got " + ((record.value > 0)? "+": "") + record.value + " from " + record.author_name + " on patch in " + record.module + smile;
- }
- } else if (record.record_type == "review") {
- tweet = record.status + " change request by " + record.author_name + " in " + record.module;
- } else if (record.record_type == "patch") {
- tweet = record.author_name + " submitted «" + record.parent_subject + "» in " + record.module;
- } else if (record.record_type == "email") {
- tweet = record.author_name + " emails about " + record.subject;
- } else if (record.record_type == "bpd" || record.record_type == "bpc") {
- tweet = "Blueprint «" + record.title + "» in " + record.module;
- } else if (record.record_type == "bugf" || record.record_type == "bugr") {
- tweet = record.status + " bug «" + record.title + "» in " + record.module + " " + record.web_link;
- } else if (record.record_type == "tr") {
- tweet = record.author_name + " translated " + record.loc + " words into " + record.language;
- }
-
- record.tweet = tweet;
- record.tweet_url = "http://stackalytics.com/report/record/" + record.primary_key;
-}
-
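-/*
- Shadows the built-in encodeURI: the value goes through
- encodeURIComponent and "*" is additionally escaped as %2A (note that
- String.replace with a string pattern only rewrites the first
- occurrence).
- */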
-function encodeURI(s) {
- s = encodeURIComponent(s);
- s = s.replace("*", "%2A");
- return s;
-}
-
-function getUrlVars() {
- var vars = {};
- window.location.href.replace(/[?&]+([^=&]+)=([^&]*)/gi, function (m, key, value) {
- vars[key] = decodeURIComponent(value);
- });
- return vars;
-}
-
-function makeLink(id, title, param_name) {
- var options = {};
- options[param_name] = id.toLowerCase();
- var link = makeURI("/", options);
- return "" + title + " "
-}
-
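-/*
- Merges the current page's query parameters with the given options and
- appends them (lower-cased) to uri. For example, on a page opened with
- "?release=juno" (a hypothetical value), makeURI("/api/1.0/stats",
- {metric: "commits"}) would produce something like
- "/api/1.0/stats?release=juno&metric=commits".
- */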
-function makeURI(uri, options) {
- var ops = {};
- $.extend(ops, getUrlVars());
- if (options != null) {
- $.extend(ops, options);
- }
- var str = $.map(ops,function (val, index) {
- return index + "=" + encodeURI(("" + val).replace("&", "")).toLowerCase();
- }).join("&");
-
- return (str == "") ? uri : uri + "?" + str;
-}
-
-function getPageState() {
- return {
- release: $('#release').val(),
- project_type: $('#project_type').val(),
- module: $('#module').val(),
- company: $('#company').val(),
- user_id: $('#user').val(),
- metric: $('#metric').val()
- };
-}
-
-function reload(extra) {
- window.location.search = $.map($.extend(getUrlVars(), extra), function (val, index) {
- return val? (index + "=" + val) : null;
- }).join("&")
-}
-
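-/*
- Wires one select2 dropdown to an API endpoint: shows a disabled
- "Loading..." stub, fetches the option list, preselects the value taken
- from the URL (falling back to the server-side default) and reloads the
- page with the updated query string when the selection changes.
- */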
-function initSingleSelector(name, api_url, select2_extra_options, change_handler) {
- var selectorId = "#" + name + "_selector";
-
- $(selectorId).val(0).select2({
- data: [
- {id: 0, text: "Loading..." }
- ],
- formatSelection: function (item) {
- return "" + item.text + "
"
- }
- }).select2("enable", false);
-
- $.ajax({
- url: api_url,
- dataType: "json",
- success: function (data) {
- var initial_value = getUrlVars()[name];
- if (initial_value) {
- initial_value = (initial_value).toLocaleLowerCase();
- } else if (data["default"]) {
- initial_value = data["default"];
- }
- $(selectorId).
- val(initial_value).
- select2($.extend({
- data: data["data"]
- }, select2_extra_options)).
-            on("select2-selecting", function (e) { /* don't use the 'change' event: it updates the value first and only then refreshes the page */
- var options = {};
- options[name] = e.val;
- if (change_handler) {
- change_handler(options);
- }
- reload(options);
- }).
- on("select2-removed",function (e) {
- var options = {};
- options[name] = '';
- reload(options);
- }).
- select2("enable", true);
- }
- });
-}
-
-function initSelectors(base_url) {
- initSingleSelector("release", makeURI(base_url + "/api/1.0/releases"));
- initSingleSelector("project_type", makeURI(base_url + "/api/1.0/project_types"), {
- formatResultCssClass: function (item) {
- return (item.child) ? "project_group_item" : "project_group";
- }
- }, function (options) {
- options['module'] = null;
- });
- initSingleSelector("module", makeURI(base_url + "/api/1.0/modules", {tags: "module,program,group"}), {
- formatResultCssClass: function (item) {
- return (item.tag)? ("select_module_" + item.tag): "";
- },
- allowClear: true
- });
- initSingleSelector("company", makeURI(base_url + "/api/1.0/companies"), {allowClear: true});
- initSingleSelector("user_id", makeURI(base_url + "/api/1.0/users"), {allowClear: true});
- initSingleSelector("metric", makeURI(base_url + "/api/1.0/metrics"));
-}
diff --git a/stackalytics/dashboard/templates/404.html b/stackalytics/dashboard/templates/404.html
deleted file mode 100644
index f85c4ea8f..000000000
--- a/stackalytics/dashboard/templates/404.html
+++ /dev/null
@@ -1,12 +0,0 @@
-{% block head %}
-
-{% endblock %}
-
-{% block body %}
-
- 404 Not Found
-
- The requested page is not found. You will be automatically redirected to Main.
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/_macros/activity_log.html b/stackalytics/dashboard/templates/_macros/activity_log.html
deleted file mode 100644
index 44e11eb0f..000000000
--- a/stackalytics/dashboard/templates/_macros/activity_log.html
+++ /dev/null
@@ -1,220 +0,0 @@
-{% macro show_activity_log(user_id=None, company=None, blueprint_id=None,
-show_record_type=True, show_user_gravatar=True, gravatar_size=32, show_all=True,
-show_twitter=False) -%}
-
-{% if show_twitter %}
-
-{% endif %}
-
-
-
-
-
-
-
-
-
-
-{%- endmacro %}
diff --git a/stackalytics/dashboard/templates/_macros/contribution_summary.html b/stackalytics/dashboard/templates/_macros/contribution_summary.html
deleted file mode 100644
index 5872dba42..000000000
--- a/stackalytics/dashboard/templates/_macros/contribution_summary.html
+++ /dev/null
@@ -1,58 +0,0 @@
-{% macro show_contribution_summary(user_id=None, company=None, show_all=True) -%}
-
-
-
-
-
- Contribution Summary
-
-
-{%- endmacro %}
diff --git a/stackalytics/dashboard/templates/_macros/module_details.html b/stackalytics/dashboard/templates/_macros/module_details.html
deleted file mode 100644
index 15c61e6ac..000000000
--- a/stackalytics/dashboard/templates/_macros/module_details.html
+++ /dev/null
@@ -1,62 +0,0 @@
-{% macro show_module_details(module) -%}
-
-
-
-
-
-
-
-{%- endmacro %}
diff --git a/stackalytics/dashboard/templates/_macros/user_profile.html b/stackalytics/dashboard/templates/_macros/user_profile.html
deleted file mode 100644
index 4fe37531d..000000000
--- a/stackalytics/dashboard/templates/_macros/user_profile.html
+++ /dev/null
@@ -1,61 +0,0 @@
-{% macro show_user_profile(user_id) -%}
-
-
-
-
-
-
-
-{%- endmacro %}
diff --git a/stackalytics/dashboard/templates/base.html b/stackalytics/dashboard/templates/base.html
deleted file mode 100644
index e3ba390d4..000000000
--- a/stackalytics/dashboard/templates/base.html
+++ /dev/null
@@ -1,82 +0,0 @@
-
-
-
-
-
-
- Stackalytics {% if page_title %}| {{ page_title }} {% endif %}
-
- {% if not page_title %}
-
- {% else %}
-
- {% endif %}
-
-
-
-
-
- {% if page_title %}
-
-
-
-
-
-
- {% endif %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% if active_tab == 'driverlog' %}
-
- {% else %}
-
-
- {% endif %}
-
- {% block head %}{% endblock %}
-
-
-
-
-
-
- {% block body %}{% endblock %}
-
-
-
-
\ No newline at end of file
diff --git a/stackalytics/dashboard/templates/kpi/base_kpi.html b/stackalytics/dashboard/templates/kpi/base_kpi.html
deleted file mode 100644
index face3bc21..000000000
--- a/stackalytics/dashboard/templates/kpi/base_kpi.html
+++ /dev/null
@@ -1,30 +0,0 @@
-{% extends "base.html" %}
-
-{% block head %}
-
-
-
- {% block scripts %}{% endblock %}
-{% endblock %}
-
-{% block body %}
-
-
-
-
-{% block content %}
-{% endblock %}
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/kpi/example.html b/stackalytics/dashboard/templates/kpi/example.html
deleted file mode 100644
index 5d2aeb9ec..000000000
--- a/stackalytics/dashboard/templates/kpi/example.html
+++ /dev/null
@@ -1,51 +0,0 @@
-{% extends "kpi/base_kpi.html" %}
-
-{% set page_title = 'Example of KPI report' %}
-
-{% block scripts %}
-
-{% endblock %}
-
-
-{% block content %}
- Example of KPI report
-
- Position in top
-
-
- Percentage in top
-
-
- Core status
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/kpi/script.html b/stackalytics/dashboard/templates/kpi/script.html
deleted file mode 100644
index 71cf0e661..000000000
--- a/stackalytics/dashboard/templates/kpi/script.html
+++ /dev/null
@@ -1,80 +0,0 @@
-{% extends "kpi/base_kpi.html" %}
-
-{% set page_title = 'Example of scripted KPI report' %}
-
-{% block scripts %}
-
-{% endblock %}
-
-
-{% block content %}
-    A KPI report can be configured by a script or loaded from a URL.
-
-
-
-
-
- Update
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/layout.html b/stackalytics/dashboard/templates/layout.html
deleted file mode 100644
index a9fdd60d5..000000000
--- a/stackalytics/dashboard/templates/layout.html
+++ /dev/null
@@ -1,115 +0,0 @@
-{% extends "base.html" %}
-
-{% block head %}
-
-
-
-{% block scripts %}{% endblock %}
-
-{% endblock %}
-
-{% block body %}
-
-
- {% if not runtime_storage_update_time %}
-
- The data is being loaded now and is not complete.
- {% set update_time_title = '' %}
- {% else %}
- {% if runtime_storage_update_time is too_old %}
-
- The data was last updated on {{ runtime_storage_update_time_str }}
- {% endif %}
- {% set update_time_title = 'Last updated on ' + runtime_storage_update_time_str %}
- {% endif %}
-
-
-
-
-
-
-
-
- {% block left_frame %}{% endblock %}
-
-
-
-
- {% block right_frame %}{% endblock %}
-
-
-
-
-
-
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/overview.html b/stackalytics/dashboard/templates/overview.html
deleted file mode 100644
index 1c9e03a5c..000000000
--- a/stackalytics/dashboard/templates/overview.html
+++ /dev/null
@@ -1,224 +0,0 @@
-{% extends "layout.html" %}
-{% import '_macros/activity_log.html' as activity_log %}
-{% import '_macros/contribution_summary.html' as contribution_summary %}
-{% import '_macros/user_profile.html' as user_profile %}
-{% import '_macros/module_details.html' as module_details %}
-
-{% set show_company_breakdown = (not company) and (not user_id) %}
-{% set show_engineer_breakdown = (not user_id) %}
-{% set show_bp_breakdown = (metric in ['bpd', 'bpc']) %}
-{% set show_module_breakdown = (not module) %}
-{% set show_languages_breakdown = (metric in ['translations']) %}
-{% set show_user_activity = (user_id) %}
-{% set show_module_activity = (module) and (not user_id) %}
-{% set show_activity = (show_user_activity) or (show_module_activity) %}
-{% set show_contribution_on_left = (not user_id) and (module) %}
-{% set show_contribution_on_right = (user_id) or (company and not module) %}
-{% set show_user_profile = (user_id) %}
-{% set show_module_details = (module) %}
-{% set show_review_ratio = (metric in ['marks']) %}
-
-{% macro show_report_links(module=None, company=None, user_id=None) -%}
- {% if module %}
-
-
-
-
- {% endif %}
- {% if company %}
-
- {% endif %}
- {% if user_id %}
-
- {% endif %}
-{%- endmacro %}
-
-{% block scripts %}
-
-
-{% endblock %}
-
-{% block left_frame %}
-
- {% if show_company_breakdown %}
-
-
Contribution by companies
-
-
-
-
-
-
- #
- Company
- {{ metric_label }}
-
-
-
-
-
-
-
- {% endif %}
-
- {% if show_engineer_breakdown %}
-
-
Contribution by contributors
-
-
-
-
-
-
- #
- Contributor
- {% if show_review_ratio %}
- -2|-1|+1|+2|A|x (+ ratio)
- {% endif %}
- {{ metric_label }}
-
-
-
-
-
-
-
- {% endif %}
-
- {% if show_user_profile %}
- {{ user_profile.show_user_profile(user_id=user_id) }}
- {% endif %}
- {% if show_user_activity %}
- {{ activity_log.show_activity_log(gravatar_size=32, show_all=False) }}
- {% endif %}
-
- {% if show_contribution_on_left %}
- {{ contribution_summary.show_contribution_summary(show_all=False) }}
- {{ show_report_links(module, company, user_id) }}
- {% endif %}
-
-{% endblock %}
-
-{% block right_frame %}
-
- {% if show_languages_breakdown %}
-
- {% if language %}
-
- Language
-
-
-
- {% else %}
-
Languages
-
-
-
-
-
-
- #
- Language
- Translations
-
-
-
-
-
- {% endif %}
-
-
- {% endif %}
-
- {% if show_module_breakdown %}
-
-
Contribution by modules
-
-
-
-
-
-
- #
- Module
- {{ metric_label }}
-
-
-
-
-
-
-
- {% endif %}
-
- {% if show_module_details %}
- {{ module_details.show_module_details(module=module) }}
- {% endif %}
-
- {% if show_bp_breakdown %}
-
-
Blueprint popularity
-
-
- This metric shows how many times a blueprint was mentioned in emails and commit messages.
-
-
-
-
-
-
-
- #
- Blueprint
- Status
- Date
- Mentions
-
-
-
-
-
-
-
- {% endif %}
-
- {% if show_contribution_on_right %}
- {{ contribution_summary.show_contribution_summary(show_all=False) }}
- {{ show_report_links(module, company, user_id) }}
- {% endif %}
-
- {% if show_module_activity %}
- {{ activity_log.show_activity_log(gravatar_size=32, show_all=False) }}
- {% endif %}
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/activity.html b/stackalytics/dashboard/templates/reports/activity.html
deleted file mode 100644
index 1eba5473c..000000000
--- a/stackalytics/dashboard/templates/reports/activity.html
+++ /dev/null
@@ -1,7 +0,0 @@
-{% extends "reports/base_report.html" %}
-{% import '_macros/activity_log.html' as activity_log %}
-
-{% block content %}
-{{ activity_log.show_activity_log(gravatar_size=64, show_all=False,
- show_twitter=True) }}
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/affiliation_changes.html b/stackalytics/dashboard/templates/reports/affiliation_changes.html
deleted file mode 100644
index 0ff23c047..000000000
--- a/stackalytics/dashboard/templates/reports/affiliation_changes.html
+++ /dev/null
@@ -1,160 +0,0 @@
-{% extends "reports/base_report.html" %}
-
-{% set page_title = 'Company Affiliation Changes' %}
-
-{% block scripts %}
-
-
-{% endblock %}
-
-{% block content %}
-Company Affiliation Changes
-
- Start of the period: End of the period:
-
-
-
-
-
- From
- To
- Count
- Users
-
-
-
-
-
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/base_report.html b/stackalytics/dashboard/templates/reports/base_report.html
deleted file mode 100644
index 0518f2761..000000000
--- a/stackalytics/dashboard/templates/reports/base_report.html
+++ /dev/null
@@ -1,19 +0,0 @@
-{% extends "base.html" %}
-
-{% block head %}
- {% block scripts %}{% endblock %}
-{% endblock %}
-
-{% block body %}
-
-
-
-
-{% block content %}
-{% endblock %}
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/blueprint_summary.html b/stackalytics/dashboard/templates/reports/blueprint_summary.html
deleted file mode 100644
index 3b18917cc..000000000
--- a/stackalytics/dashboard/templates/reports/blueprint_summary.html
+++ /dev/null
@@ -1,44 +0,0 @@
-{% extends "reports/base_report.html" %}
-{% import '_macros/activity_log.html' as activity_log %}
-
-{% set page_title = 'The activity log for blueprint "' + blueprint.title + '"' %}
-
-{% block content %}
-Blueprint “{{ blueprint.title }}”
-
-Name: {{ blueprint.name }}
-
-Module: {{ blueprint.module_link | safe }}
-Status: {{blueprint.lifecycle_status}}
-Priority: {{blueprint.priority}}
-Definition Status: {{blueprint.definition_status}}
-Implementation Status: {{blueprint.implementation_status}}
-Direction: {% if blueprint.direction_approved %} Approved {% else %} Needs Approval {% endif %}
-Registered By: {{ blueprint.author_link | safe }} ({{ blueprint.company_link | safe }})
-Registered On: {{ blueprint.date_str }}
-{% if blueprint.mention_count %}
-Popularity: mentioned {{blueprint.mention_count}} times, last on {{blueprint.mention_date_str}}
-{% endif %}
-{% if blueprint.specification_url %}
-
-{% endif %}
-
-{% if blueprint.summary %}
-Summary
-{{ blueprint.summary | safe }}
-{% endif %}
-
-{% if blueprint.whiteboard %}
-Whiteboard
-{{ blueprint.whiteboard }}
-{% endif %}
-
-{{ activity_log.show_activity_log(blueprint_id=blueprint.id, gravatar_size=64) }}
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/company_activity.html b/stackalytics/dashboard/templates/reports/company_activity.html
deleted file mode 100644
index 5b7a7c4b4..000000000
--- a/stackalytics/dashboard/templates/reports/company_activity.html
+++ /dev/null
@@ -1,25 +0,0 @@
-{% extends "reports/base_report.html" %}
-{% import '_macros/activity_log.html' as activity_log %}
-{% import '_macros/contribution_summary.html' as contribution_summary %}
-
-{% set page_title = company_name + " activity report" %}
-
-{% block scripts %}
-
-{% endblock %}
-
-{% block content %}
-{{ company_name }} activity report
-
-{{ contribution_summary.show_contribution_summary(company=company_name) }}
-
-
-
-{{ activity_log.show_activity_log(company=company_name, gravatar_size=64,
- show_twitter=True) }}
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/contribution.html b/stackalytics/dashboard/templates/reports/contribution.html
deleted file mode 100644
index e5c3654f4..000000000
--- a/stackalytics/dashboard/templates/reports/contribution.html
+++ /dev/null
@@ -1,167 +0,0 @@
-{% extends "reports/base_report.html" %}
-
-{% set page_title = "Contribution into " + module + " during the recent " + days + " days" %}
-
-{% block scripts %}
-
-
-
-
-
-
-{% endblock %}
-
-{% block content %}
-Contribution into {{ module }} for the last {{ days }} days
-
-
-
-
- #
- Engineer
- Reviews
- -2
- -1
- +1
- +2
- A
- x
- + %
- Disagreements
- Ratio
- On review / patch sets
- Commits
- Emails
-
-
-
-
-
-
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/cores.html b/stackalytics/dashboard/templates/reports/cores.html
deleted file mode 100644
index f0da1c4c3..000000000
--- a/stackalytics/dashboard/templates/reports/cores.html
+++ /dev/null
@@ -1,192 +0,0 @@
-{% extends "reports/base_report.html" %}
-
-{% set page_title = 'Cores' %}
-
-{% block scripts %}
-
-
-
-
-{% endblock %}
-
-{% block content %}
- Cores in
-
-
-
-
-
-
- Loading...
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/driverlog.html b/stackalytics/dashboard/templates/reports/driverlog.html
deleted file mode 100644
index b81ed51ea..000000000
--- a/stackalytics/dashboard/templates/reports/driverlog.html
+++ /dev/null
@@ -1,149 +0,0 @@
-{% extends "base.html" %}
-
-{% set active_tab = 'driverlog' %}
-{% set page_title = "Vendor Drivers" %}
-
-{% block head %}
-
-{% endblock %}
-
-
-{% block body %}
-
-
-
Information on drivers is in beta and may contain some inaccuracies. If you see an error,
please help us make the service better by
-
filing a bug or an update request
-
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/members.html b/stackalytics/dashboard/templates/reports/members.html
deleted file mode 100644
index 88dd8bf69..000000000
--- a/stackalytics/dashboard/templates/reports/members.html
+++ /dev/null
@@ -1,405 +0,0 @@
-{% extends "base.html" %}
-
-{% set active_tab = 'members' %}
-{% set page_title = 'OpenStack Foundation members' %}
-
-{% block head %}
-
-
-
-
-
-
-{% endblock %}
-
-{% block body %}
-
-
-
-
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/open_reviews.html b/stackalytics/dashboard/templates/reports/open_reviews.html
deleted file mode 100644
index a44d1524a..000000000
--- a/stackalytics/dashboard/templates/reports/open_reviews.html
+++ /dev/null
@@ -1,105 +0,0 @@
-{% extends "reports/base_report.html" %}
-
-{% set page_title = "Open change requests in " + module %}
-
-{% block scripts %}
-
-{% endblock %}
-
-{% block content %}
-Open change requests in {{ module }}
-
-Summary
-
- Total open reviews: {{ total_open }}
- Waiting on submitter: {{ waiting_on_submitter }}
- Waiting on reviewer: {{ waiting_on_reviewer }}
- Waiting on CI (no votes yet): {{ waiting_on_ci }}
-
-
-{% if waiting_on_reviewer %}
-Change requests waiting for reviewers since the last vote or mark
-
-
- Average wait time: {{ reviewer_latest_revision.average }}
- Max wait time: {{ reviewer_latest_revision.max }}
-
-
-
-
-
-{% for item in reviewer_latest_revision.reviews[:5] %}
- {{ item.updated_on_age }} {{ item.url }} {{ item.subject }} by {{ item.author_link|safe }} ({{ item.company_link|safe }})
-{% endfor %}
-
-
-Change requests waiting for reviewers since the creation date
-
-
- Average wait time: {{ reviewer_first_revision.average }}
- Max wait time: {{ reviewer_first_revision.max }}
-
-
-
-
-
-{% for item in reviewer_first_revision.reviews[:5] %}
- {{ item.date_age }} {{ item.url }} {{ item.subject }} by {{ item.author_link|safe }} ({{ item.company_link|safe }})
-{% endfor %}
-
-
-{% else %}
- No change requests waiting for reviewers
-{% endif %}
-
-
-{% if waiting_on_submitter %}
-Change requests waiting for submitters since the last vote or mark
-
-
- Average wait time: {{ submitter_latest_revision.average }}
- Max wait time: {{ submitter_latest_revision.max }}
-
-
-
-
-
-{% for item in submitter_latest_revision.reviews[:5] %}
- {{ item.updated_on_age }} {{ item.url }} {{ item.subject }} by {{ item.author_link|safe }} ({{ item.company_link|safe }})
-{% endfor %}
-
-
-Change requests waiting for submitters since the creation date
-
-
- Average wait time: {{ submitter_first_revision.average }}
- Max wait time: {{ submitter_first_revision.max }}
-
-
-
-
-
-{% for item in submitter_first_revision.reviews[:5] %}
- {{ item.date_age }} {{ item.url }} {{ item.subject }} by {{ item.author_link|safe }} ({{ item.company_link|safe }})
-{% endfor %}
-
-
-{% else %}
- No change requests waiting for submitters
-{% endif %}
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/reports/record.html b/stackalytics/dashboard/templates/reports/record.html
deleted file mode 100644
index 4eee4b070..000000000
--- a/stackalytics/dashboard/templates/reports/record.html
+++ /dev/null
@@ -1,219 +0,0 @@
-{% extends "reports/base_report.html" %}
-
-{% set gravatar_size = 64 %}
-
-{% block scripts %}
-
-
-
-{% endblock %}
-
-{% block content %}
-
- Contribution Record
-
-
-
-
-
-
-
-
-
- {% if record.coauthor %}
-
- {% else %}
-
- {% endif %}
-
-
-
- {% if record.record_type == "commit" %}
-
-
- {% if record.commit_date_str != "" and record.commit_date_str != date_str %}
-
- Commit date: {{ record.commit_date_str }}
- {% endif %}
- {% if record.correction_comment != "" %}
-
- Commit corrected:
- {{ record.correction_comment }}
- {% endif %}
-
- +{{ record.lines_added }}
- - {{ record.lines_deleted }}
-
-
- {% elif record.record_type == "mark" %}
-
-
- Change request by: {{ record.parent_author_link | safe }}
- ({{ record.parent_company_link | safe }})
-
- {% if record.patch_author_link != record.parent_author_link %}
-
- Patch by: {{ record.patch_author_link | safe }}
- ({{ record.patch_company_link | safe }})
-
- {% endif %}
-
-
- {% if (record.type == "Workflow" and record.value == 1) %}
- Approve
- {% elif (record.type == "Self-Workflow" and record.value == 1) %}
- Self-Approve
- {% elif (record.type == "Workflow" and record.value == -1) %}
- Work in progress
- {% elif (record.type == "Abandon" or record.type == "Self-Abandon") %}
- {{ record.type }}
- {% else %}
- {{ record.type }}:
- {% if record.value > 0 %}+{% endif %}{{ record.value }}
- {% endif %}
-
-
- {% elif record.record_type == "review" %}
-
-
- Current Status: {{ record.status }}
-
-
- {% elif record.record_type == "patch" %}
-
-
-
- {% elif record.record_type == "email" %}
-
- {% if blueprint_id_count %}
-
- Mentions blueprints:
- {% for value in blueprint_links %}
- {{ value | safe }}
- {% endfor %}
-
- {% endif %}
- {% if body %}
-
- Email:
-
- {% endif %}
-
- {% elif ((record.record_type == "bpd") or (record.record_type == "bpc")) %}
-
-
-
-
- Priority: {{ record.priority }}
-
-
- Status: {{ record.lifecycle_status }}
- ({{ record.definition_status }} ,
- {{ record.implementation_status }} )
-
- {% if mention_count %}
-
- Mention count: {{ record.mention_count }}, last mention
- on {{ record.mention_date_str }}
- {% endif %}
-
- {% elif ((record.record_type == "bugf") or (record.record_type == "bugr")) %}
-
-
- Status: {{ record.status }}
-
-
- Importance: {{ record.importance }}
-
-
- {% elif record.record_type == "ci" %}
-
-
- {% if value == true %}
- Success {% else %}
- Failure {% endif %}
-
-
-
- {% elif record_type == "member" %}
-
-
- {% elif record_type == "tr" %}
-
- {% endif %}
-
-
-
-
-
-
-
-
-
-{% endblock %}
\ No newline at end of file
diff --git a/stackalytics/dashboard/templates/reports/user_activity.html b/stackalytics/dashboard/templates/reports/user_activity.html
deleted file mode 100644
index 91a021ed1..000000000
--- a/stackalytics/dashboard/templates/reports/user_activity.html
+++ /dev/null
@@ -1,27 +0,0 @@
-{% extends "reports/base_report.html" %}
-{% import '_macros/activity_log.html' as activity_log %}
-{% import '_macros/contribution_summary.html' as contribution_summary %}
-{% import '_macros/user_profile.html' as user_profile %}
-
-{% set page_title = user.user_name + " activity report" %}
-
-{% block scripts %}
-
-{% endblock %}
-
-{% block content %}
-{{ user.user_name }} activity report
-
-{{ user_profile.show_user_profile(user_id=user.user_id) }}
-{{ contribution_summary.show_contribution_summary(user_id=user.user_id) }}
-
-
-
-{{ activity_log.show_activity_log(user_id=user.user_id,
- show_user_gravatar=false, gravatar_size=64, show_twitter=True) }}
-
-{% endblock %}
diff --git a/stackalytics/dashboard/templates/widget.html b/stackalytics/dashboard/templates/widget.html
deleted file mode 100644
index 4f4c531cc..000000000
--- a/stackalytics/dashboard/templates/widget.html
+++ /dev/null
@@ -1,137 +0,0 @@
-
-
-
-
-
-Stackalytics Widget
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OpenStack® Contribution Tracker
-
-
-
-
-
- Release
-
-
-
-
- Project Type
-
-
-
-
- Metric
-
-
-
-
-
-
-
diff --git a/stackalytics/dashboard/vault.py b/stackalytics/dashboard/vault.py
deleted file mode 100644
index d0a42d9af..000000000
--- a/stackalytics/dashboard/vault.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-from datetime import timedelta
-import os
-
-import flask
-from oslo_config import cfg
-from oslo_log import log as logging
-import six
-
-from stackalytics.dashboard import memory_storage
-from stackalytics.processor import runtime_storage
-from stackalytics.processor import user_processor
-from stackalytics.processor import utils
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-RECORD_FIELDS_FOR_AGGREGATE = ['record_id', 'primary_key', 'record_type',
- 'company_name', 'module', 'user_id', 'release',
- 'date', 'week', 'author_name', 'loc', 'type',
- 'disagreement', 'value', 'status',
- 'blueprint_id']
-
-CompactRecord = collections.namedtuple('CompactRecord',
- RECORD_FIELDS_FOR_AGGREGATE)
-
-_unihash = {}
-
-
-def uniintern(o):
- if isinstance(o, str):
- return six.moves.intern(o)
- if not isinstance(o, six.string_types[0]):
- return o
- if isinstance(o, six.text_type):
- return _unihash.setdefault(o, o)
-
-
-def compact_records(records):
- for record in records:
- compact = dict((k, uniintern(record.get(k)))
- for k in RECORD_FIELDS_FOR_AGGREGATE)
-
- yield CompactRecord(**compact)
-
-
-def extend_record(record):
- runtime_storage_inst = get_runtime_storage()
- return runtime_storage_inst.get_by_key(
- runtime_storage_inst._get_record_name(record.record_id))
-
-
-def get_vault():
- vault = getattr(flask.current_app, 'stackalytics_vault', None)
- if not vault:
- try:
- vault = {}
- runtime_storage_inst = runtime_storage.get_runtime_storage(
- CONF.runtime_storage_uri)
- vault['runtime_storage'] = runtime_storage_inst
- vault['memory_storage'] = memory_storage.get_memory_storage(
- memory_storage.MEMORY_STORAGE_CACHED)
-
- flask.current_app.stackalytics_vault = vault
- except Exception as e:
- LOG.critical('Failed to initialize application: %s', e,
- exc_info=True)
- flask.abort(500)
-
- if not getattr(flask.request, 'stackalytics_updated', None):
- time_now = utils.date_to_timestamp('now')
- may_update_by_time = time_now > vault.get('vault_next_update_time', 0)
- if may_update_by_time:
- flask.request.stackalytics_updated = True
- vault['vault_update_time'] = time_now
- vault['vault_next_update_time'] = (
- time_now + CONF.dashboard_update_interval)
- memory_storage_inst = vault['memory_storage']
- have_updates = memory_storage_inst.update(compact_records(
- vault['runtime_storage'].get_update(os.getpid())))
- vault['runtime_storage_update_time'] = (
- vault['runtime_storage'].get_by_key(
- 'runtime_storage_update_time'))
-
- if have_updates:
- vault['cache'] = {}
- vault['cache_size'] = 0
- _init_releases(vault)
- _init_module_groups(vault)
- _init_project_types(vault)
- _init_repos(vault)
- _init_user_index(vault)
-
- return vault
-
-
-def get_memory_storage():
- return get_vault()['memory_storage']
-
-
-def get_runtime_storage():
- return get_vault()['runtime_storage']
-
-
-def _init_releases(vault):
- runtime_storage_inst = vault['runtime_storage']
- releases = runtime_storage_inst.get_by_key('releases')
- releases_map = collections.OrderedDict()
-
- if releases:
- first_day = vault['memory_storage'].get_first_record_day()
- releases[0]['start_date'] = int(
- timedelta(days=first_day).total_seconds())
-
- vault['start_date'] = releases[0]['end_date']
- vault['end_date'] = releases[-1]['end_date']
- start_date = releases[0]['end_date']
- for r in releases[1:]:
- r['start_date'] = start_date
- start_date = r['end_date']
-
- releases_map.update((r['release_name'].lower(), r) for r in releases)
-
- if releases_map.pop('prehistory', None):
- LOG.debug("Ignore 'prehistory' release")
-
- vault['releases'] = releases_map
-
-
-def _init_module_groups(vault):
- runtime_storage_inst = vault['runtime_storage']
- module_groups = runtime_storage_inst.get_by_key('module_groups') or {}
-
- vault['module_id_index'] = module_groups
-
-
-def _init_project_types(vault):
- runtime_storage_inst = vault['runtime_storage']
- project_types = runtime_storage_inst.get_by_key('project_types') or {}
-
- # add ref from child to parent
- parent_pt = None
- for pt in project_types:
- if pt.get('child'):
- pt['parent'] = parent_pt
- else:
- parent_pt = pt
-
- vault['project_types'] = project_types
- vault['project_types_index'] = dict((pt['id'], pt) for pt in project_types)
-
-
-def _init_repos(vault):
- runtime_storage_inst = vault['runtime_storage']
- repos = runtime_storage_inst.get_by_key('repos') or {}
-
- vault['repos_index'] = dict((r['module'], r) for r in repos)
-
-
-def _init_user_index(vault):
- vault['user_index'] = {}
-
-
-def get_project_types():
- return get_vault()['project_types']
-
-
-def is_project_type_valid(project_type):
- if not project_type:
- return False
- project_type = project_type.lower()
- project_types = get_vault().get('project_types_index', [])
- return project_type in project_types
-
-
-def get_project_type(project_type_id):
- project_type_id = project_type_id.lower()
- if not is_project_type_valid(project_type_id):
- return None
- return get_vault()['project_types_index'][project_type_id]
-
-
-def get_user_from_runtime_storage(user_id):
- runtime_storage_inst = get_runtime_storage()
- user_index = get_vault()['user_index']
- if user_id not in user_index:
- user_index[user_id] = user_processor.load_user(
- runtime_storage_inst, user_id=user_id)
- return user_index[user_id]
-
-
-def _resolve_modules_for_releases(module_ids, releases):
- module_id_index = get_vault().get('module_id_index') or {}
-
- for module_id in module_ids:
- if module_id in module_id_index:
- module_group = module_id_index[module_id]
-
- if not releases or 'all' in releases:
- if 'releases' in module_group:
- for release, modules in six.iteritems(
- module_group['releases']):
- for module in modules:
- yield module, release
- if 'modules' in module_group:
- for module in module_group['modules']:
- yield module, None
- else:
- for release in releases:
- if 'releases' in module_group:
- for module in module_group['releases'][release]:
- yield module, release
- if 'modules' in module_group:
- for module in module_group['modules']:
- yield module, release
-
-
-def resolve_modules(module_ids, releases):
- all_releases = get_vault()['releases'].keys()
- for module, release in _resolve_modules_for_releases(module_ids, releases):
- if release is None:
- for r in all_releases:
- yield (module, r)
- else:
- yield (module, release)
-
-
-def resolve_project_types(project_types):
- modules = set()
- project_types_index = get_vault()['project_types_index']
- for pt in project_types:
- pt = pt.lower()
- if pt in project_types_index:
- modules |= set(project_types_index[pt]['modules'])
- return modules
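
The CompactRecord/uniintern pair above is a memory-compaction trick: every field of every record is routed through an interning table so that equal strings share a single object, and the fields are packed into a namedtuple instead of a dict. A minimal standalone sketch of the same idea (plain Python; the record shape is invented for illustration):

import collections
import sys

Compact = collections.namedtuple('Compact', ['user_id', 'company'])

_interned = {}  # maps a value to its single shared instance


def intern_value(v):
    # sys.intern covers native str; the dict handles anything else hashable
    if isinstance(v, str):
        return sys.intern(v)
    return _interned.setdefault(v, v)


def compact(raw_records):
    for rec in raw_records:
        yield Compact(user_id=intern_value(rec.get('user_id')),
                      company=intern_value(rec.get('company')))


rows = list(compact([{'user_id': 'u1', 'company': 'Acme'},
                     {'user_id': 'u2', 'company': 'Acme'}]))
assert rows[0].company is rows[1].company  # one shared 'Acme' object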
diff --git a/stackalytics/dashboard/web.py b/stackalytics/dashboard/web.py
deleted file mode 100644
index a72ae6ba1..000000000
--- a/stackalytics/dashboard/web.py
+++ /dev/null
@@ -1,715 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import operator
-import os
-import time
-
-import flask
-from oslo_config import cfg
-from oslo_log import log as logging
-import six
-
-from stackalytics.dashboard import config
-from stackalytics.dashboard import decorators
-from stackalytics.dashboard import helpers
-from stackalytics.dashboard import kpi
-from stackalytics.dashboard import parameters
-from stackalytics.dashboard import reports
-from stackalytics.dashboard import vault
-from stackalytics.processor import config as processor_cfg
-from stackalytics.processor import utils
-
-# Application objects ---------
-
-app = flask.Flask(__name__)
-app.config.from_object(__name__)
-app.config.from_envvar('DASHBOARD_CONF', silent=True)
-app.register_blueprint(reports.blueprint)
-app.register_blueprint(kpi.blueprint)
-
-LOG = logging.getLogger(__name__)
-
-CONF = cfg.CONF
-CONF.register_opts(processor_cfg.CONNECTION_OPTS + config.DASHBOARD_OPTS)
-
-
-# Handlers ---------
-
-@app.route('/')
-@decorators.templated()
-def overview():
- pass
-
-
-@app.route('/widget')
-def widget():
- return flask.render_template('widget.html')
-
-
-# AJAX Handlers ---------
-
-def _get_aggregated_stats(records, metric_filter, keys, param_id,
- param_title=None, finalize_handler=None):
- param_title = param_title or param_id
- result = dict((c, {'metric': 0, 'id': c}) for c in keys)
- context = {'vault': vault.get_vault()}
- if metric_filter:
- for record in records:
- metric_filter(result, record, param_id, context)
- result[getattr(record, param_id)]['name'] = (
- getattr(record, param_title))
- else:
- for record in records:
- record_param_id = getattr(record, param_id)
- result[record_param_id]['metric'] += 1
- result[record_param_id]['name'] = getattr(record, param_title)
-
- response = [r for r in result.values() if r['metric']]
- if finalize_handler:
- response = [item for item in map(finalize_handler, response) if item]
- response.sort(key=lambda x: x['metric'], reverse=True)
- utils.add_index(response, item_filter=lambda x: x['id'] != '*independent')
- return response
-
-
-@app.route('/api/1.0/new_companies')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.jsonify('stats')
-@decorators.record_filter(ignore=['start_date'])
-def get_new_companies(records, **kwargs):
-
- days = int(flask.request.args.get('days') or reports.DEFAULT_DAYS_COUNT)
- start_date = int(time.time()) - days * 24 * 60 * 60
-
- result = {}
- for record in records:
- company_name = record.company_name
- date = record.date
-
- if company_name not in result or result[company_name] > date:
- result[company_name] = date
-
- response = list(({'name': company_name,
- 'date': result[company_name],
- 'date_str': helpers.format_date(result[company_name])})
- for company_name in result
- if result[company_name] >= start_date)
-
- response.sort(key=lambda x: x['date'], reverse=True)
- utils.add_index(response)
-
- return response
-
-
-@app.route('/api/1.0/stats/companies')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('stats')
-@decorators.record_filter()
-@decorators.aggregate_filter()
-def get_companies(records, metric_filter, finalize_handler, **kwargs):
- return _get_aggregated_stats(records, metric_filter,
- vault.get_memory_storage().get_companies(),
- 'company_name',
- finalize_handler=finalize_handler)
-
-
-@app.route('/api/1.0/stats/modules')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('stats')
-@decorators.record_filter()
-@decorators.aggregate_filter()
-def get_modules(records, metric_filter, finalize_handler, **kwargs):
- return _get_aggregated_stats(records, metric_filter,
- vault.get_memory_storage().get_modules(),
- 'module', finalize_handler=finalize_handler)
-
-
-def get_core_engineer_branch(user, modules):
- is_core = None
- for (module, branch) in (user.get('core') or []):
- if module in modules:
- is_core = branch
- if branch == 'master': # master is preferable, but stables are ok
- break
- return is_core
-
-
-@app.route('/api/1.0/stats/engineers')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('stats')
-@decorators.record_filter()
-@decorators.aggregate_filter()
-def get_engineers(records, metric_filter, finalize_handler, **kwargs):
- modules_names = parameters.get_parameter(kwargs, 'module')
- modules = set([m for m, r in vault.resolve_modules(modules_names, [''])])
-
- def postprocessing(record):
- if finalize_handler:
- record = finalize_handler(record)
- user = vault.get_user_from_runtime_storage(record['id'])
- record['core'] = get_core_engineer_branch(user, modules)
- return record
-
- return _get_aggregated_stats(records, metric_filter,
- vault.get_memory_storage().get_user_ids(),
- 'user_id', 'author_name',
- finalize_handler=postprocessing)
-
-
-@app.route('/api/1.0/stats/engineers_extended')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['metric'])
-@decorators.jsonify('stats')
-@decorators.record_filter(ignore=['metric'])
-def get_engineers_extended(records, **kwargs):
- modules_names = parameters.get_parameter(kwargs, 'module')
- modules = set([m for m, r in vault.resolve_modules(modules_names, [''])])
-
- def postprocessing(record):
- record = decorators.mark_finalize(record)
-
- if not (record['mark'] or record['review'] or record['commit'] or
- record['email'] or record['patch']):
- return
-
- user = vault.get_user_from_runtime_storage(record['id'])
- record['company'] = helpers.get_current_company(user)
- record['core'] = get_core_engineer_branch(user, modules)
- return record
-
- def record_processing(result, record, param_id):
- result_row = result[getattr(record, param_id)]
- record_type = record.record_type
- result_row[record_type] = result_row.get(record_type, 0) + 1
- if record_type == 'mark':
- decorators.mark_filter(result, record, param_id, {})
-
- result = {}
- for record in records:
- user_id = record.user_id
- if user_id not in result:
- result[user_id] = {'id': user_id, 'mark': 0, 'review': 0,
- 'commit': 0, 'email': 0, 'patch': 0,
- 'metric': 0}
- record_processing(result, record, 'user_id')
- result[user_id]['name'] = record.author_name
-
- response = result.values()
- response = [item for item in map(postprocessing, response) if item]
- response.sort(key=lambda x: x['metric'], reverse=True)
- utils.add_index(response)
-
- return response
-
-
-@app.route('/api/1.0/stats/distinct_engineers')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('stats')
-@decorators.record_filter()
-def get_distinct_engineers(records, **kwargs):
- result = {}
- for record in records:
- result[record.user_id] = {
- 'author_name': record.author_name,
- 'author_email': record.author_email,
- }
- return result
-
-
-@app.route('/api/1.0/activity')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.jsonify('activity')
-@decorators.record_filter()
-def get_activity_json(records, **kwargs):
- start_record = int(flask.request.args.get('start_record') or 0)
- page_size = int(flask.request.args.get('page_size') or
- parameters.DEFAULT_RECORDS_LIMIT)
- query_message = flask.request.args.get('query_message')
- return helpers.get_activity(records, start_record, page_size,
- query_message)
-
-
-@app.route('/api/1.0/contribution')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['metric'])
-@decorators.jsonify('contribution')
-@decorators.record_filter(ignore=['metric'])
-def get_contribution_json(records, **kwargs):
- return helpers.get_contribution_summary(records)
-
-
-@app.route('/api/1.0/companies')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['company'])
-@decorators.jsonify()
-@decorators.record_filter(ignore=['company'])
-def get_companies_json(record_ids, **kwargs):
- memory_storage = vault.get_memory_storage()
- companies = set(company
- for company in memory_storage.get_index_keys_by_record_ids(
- 'company_name', record_ids))
-
- if kwargs['_params']['company']:
- companies.add(memory_storage.get_original_company_name(
- kwargs['_params']['company'][0]))
-
- return [{'id': c.lower().replace('&', ''), 'text': c}
- for c in sorted(companies)]
-
-
-@app.route('/api/1.0/modules')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['module'])
-@decorators.jsonify()
-@decorators.record_filter(ignore=['module'])
-def get_modules_json(record_ids, **kwargs):
- module_id_index = vault.get_vault()['module_id_index']
-
- tags = parameters.get_parameter(kwargs, 'tag', plural_name='tags')
-
- # all modules mentioned in records
- module_ids = vault.get_memory_storage().get_index_keys_by_record_ids(
- 'module', record_ids)
-
- add_modules = set([])
- for module in six.itervalues(module_id_index):
- if set(module['modules']) & module_ids:
- add_modules.add(module['id'])
- module_ids |= add_modules
-
- # keep only modules with specified tags
- if tags:
- module_ids = set(module_id for module_id in module_ids
- if ((module_id in module_id_index) and
- (module_id_index[module_id].get('tag') in tags)))
-
- result = []
- for module_id in module_ids:
- module = module_id_index[module_id]
- result.append({'id': module['id'],
- 'text': module['module_group_name'],
- 'tag': module['tag']})
-
- return sorted(result, key=operator.itemgetter('text'))
-
-
-@app.route('/api/1.0/companies/<company_name>')
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('company')
-def get_company(company_name, **kwargs):
- memory_storage_inst = vault.get_memory_storage()
- for company in memory_storage_inst.get_companies():
- if company.lower() == company_name.lower():
- return {
- 'id': company_name,
- 'text': memory_storage_inst.get_original_company_name(
- company_name)
- }
- flask.abort(404)
-
-
-@app.route('/api/1.0/modules/<module_id>')
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('module')
-def get_module(module_id, **kwargs):
- project_type = parameters.get_single_parameter(kwargs, 'project_type')
- release = parameters.get_single_parameter(kwargs, 'release')
- module = helpers.extend_module(module_id, project_type, release)
- if not module:
- flask.abort(404)
- return module
-
-
-@app.route('/api/1.0/members')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['release', 'project_type', 'module'])
-@decorators.jsonify('members')
-@decorators.record_filter(ignore=['release', 'project_type', 'module'])
-def get_members(records, **kwargs):
- response = []
- for record in records:
- record = vault.extend_record(record)
- nr = dict([(k, record[k]) for k in
- ['author_name', 'date', 'company_name', 'member_uri']])
- nr['date_str'] = helpers.format_date(nr['date'])
- response.append(nr)
-
- response.sort(key=lambda x: x['date'], reverse=True)
- utils.add_index(response)
-
- return response
-
-
-@app.route('/api/1.0/stats/bp')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('stats')
-@decorators.record_filter()
-def get_bpd(records, **kwargs):
- result = []
- for record in records:
- if record.record_type in ['bpd', 'bpc']:
- record = vault.extend_record(record)
- mention_date = record.get('mention_date')
- if mention_date:
- date = helpers.format_date(mention_date)
- else:
- date = 'never'
- result.append({
- 'date': date,
- 'status': record['lifecycle_status'],
- 'metric': record.get('mention_count') or 0,
- 'id': record['name'],
- 'name': record['name'],
- 'link': helpers.make_blueprint_link(record['module'],
- record['name'])
- })
-
- result.sort(key=lambda x: x['metric'], reverse=True)
- utils.add_index(result)
-
- return result
-
-
-@app.route('/api/1.0/languages')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['language'])
-@decorators.jsonify()
-@decorators.record_filter(ignore=['language'])
-def get_languages_json(record_ids, **kwargs):
- memory_storage = vault.get_memory_storage()
- languages = set(r.value for r in memory_storage.get_records(record_ids))
-
- return [{'id': c.lower().replace('&', ''), 'text': c}
- for c in sorted(languages)]
-
-
-@app.route('/api/1.0/stats/languages')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('stats')
-@decorators.record_filter(ignore=['language'])
-def get_languages(records, **kwargs):
- result = []
- languages = collections.defaultdict(int)
- for record in records:
- if record.record_type in ['tr']:
- languages[record.value] += record.loc
-
- for lang, val in six.iteritems(languages):
- result.append({
- 'id': lang,
- 'name': lang,
- 'metric': val,
- })
-
- result.sort(key=lambda x: x['metric'], reverse=True)
- utils.add_index(result)
-
- return result
-
-
-@app.route('/api/1.0/users')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=['user_id'])
-@decorators.jsonify()
-@decorators.record_filter(ignore=['user_id'])
-def get_users_json(record_ids, **kwargs):
- core_in = parameters.get_single_parameter(kwargs, 'core_in') or None
- valid_modules = set()
- if core_in:
- core_in = set(core_in.split(','))
- valid_modules = vault.resolve_project_types(
- kwargs['_params']['project_type'])
- valid_modules = set(m[0] for m in vault.resolve_modules(
- valid_modules, kwargs['_params']['release']))
-
- user_ids = vault.get_memory_storage().get_index_keys_by_record_ids(
- 'user_id', record_ids)
- if kwargs['_params']['user_id']:
- user_ids.add(kwargs['_params']['user_id'][0])
-
- result = []
- for user_id in user_ids:
- user = vault.get_user_from_runtime_storage(user_id)
- r = {'id': user_id, 'text': user.get('user_name') or user['user_id']}
-
- add_flag = not core_in
- if core_in and user.get('core'):
- core_modules = [module_branch[0] for module_branch in user['core']
- if (module_branch[1] in core_in and
- module_branch[0] in valid_modules)]
- if core_modules:
- r['core'] = core_modules
- if user['companies']:
- r['company_name'] = helpers.get_current_company(user)
- add_flag = True
- if add_flag:
- result.append(r)
-
- result.sort(key=lambda x: x['text'])
- return result
-
-
-@app.route('/api/1.0/users/<user_id>')
-@decorators.response()
-@decorators.jsonify('user')
-def get_user(user_id):
- user = vault.get_user_from_runtime_storage(user_id)
- if not user:
- flask.abort(404)
- user = helpers.extend_user(user)
- return user
-
-
-@app.route('/api/1.0/releases')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=parameters.FILTER_PARAMETERS)
-@decorators.jsonify(root=('data', 'default'))
-def get_releases_json(**kwargs):
- releases = [{'id': release['release_name'],
- 'text': release['release_name'].capitalize()}
- for release in vault.get_vault()['releases'].values()]
- releases.append({'id': 'all', 'text': 'All'})
- releases.reverse()
- return (releases, parameters.get_default('release'))
-
-
-@app.route('/api/1.0/metrics')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached(ignore=parameters.FILTER_PARAMETERS)
-@decorators.jsonify(root=('data', 'default'))
-def get_metrics_json(**kwargs):
- return (sorted([{'id': m, 'text': t} for m, t in
- six.iteritems(parameters.METRIC_LABELS)],
- key=operator.itemgetter('text')),
- parameters.get_default('metric'))
-
-
-@app.route('/api/1.0/project_types')
-@decorators.response()
-@decorators.exception_handler()
-@decorators.cached(ignore=parameters.FILTER_PARAMETERS)
-@decorators.jsonify(root=('data', 'default'))
-def get_project_types_json(**kwargs):
- return ([{'id': pt['id'], 'text': pt['title'],
- 'child': pt.get('child', False)}
- for pt in vault.get_project_types()],
- parameters.get_default('project_type'))
-
-
-@app.route('/api/1.0/affiliation_changes')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.jsonify('affiliation_changes')
-def get_company_changes(**kwargs):
-
- start_days = str(flask.request.args.get('start_days') or
- utils.timestamp_to_date(int(time.time()) -
- 365 * 24 * 60 * 60))
- end_days = str(flask.request.args.get('end_days') or
- utils.timestamp_to_date(int(time.time())))
-
- start_date = utils.date_to_timestamp_ext(start_days)
- end_date = utils.date_to_timestamp_ext(end_days)
-
- runtime_storage = vault.get_runtime_storage()
- result = []
-
- for user in runtime_storage.get_all_users():
- companies = user.get('companies') or []
- if len(companies) < 2:
- continue
-
- companies_iter = iter(companies)
- company = next(companies_iter)
- old_company_name = company['company_name']
- date = company['end_date']
-
- for company in companies_iter:
- new_company_name = company['company_name']
-
- if start_date <= date <= end_date:
- result.append({
- 'user_id': user['user_id'],
- 'user_name': user['user_name'],
- 'old_company_name': old_company_name,
- 'new_company_name': new_company_name,
- 'date': date,
- })
-
- old_company_name = new_company_name
- date = company['end_date']
-
- return result
-
-
-def _get_week(kwargs, param_name):
- date_param = parameters.get_single_parameter(kwargs, param_name)
- if date_param:
- ts = utils.date_to_timestamp_ext(date_param)
- else:
- ts = vault.get_vault()[param_name]
- return utils.timestamp_to_week(ts)
-
-
-@app.route('/api/1.0/stats/timeline')
-@decorators.exception_handler()
-@decorators.response()
-@decorators.cached()
-@decorators.jsonify('timeline')
-@decorators.record_filter(ignore=['release', 'start_date'])
-def timeline(records, **kwargs):
- # find start and end dates
- metric = parameters.get_parameter(kwargs, 'metric')
- start_date = int(parameters.get_single_parameter(kwargs, 'start_date')
- or 0)
- release_name = parameters.get_single_parameter(kwargs, 'release') or 'all'
- releases = vault.get_vault()['releases']
-
- if 'all' in release_name:
- start_week = release_start_week = _get_week(kwargs, 'start_date')
- end_week = release_end_week = _get_week(kwargs, 'end_date')
- else:
- release = releases[release_name]
- start_week = release_start_week = utils.timestamp_to_week(
- release['start_date'])
- end_week = release_end_week = utils.timestamp_to_week(
- release['end_date'])
-
- now = utils.timestamp_to_week(int(time.time())) + 1
-
- # expand start-end to year if needed
- if release_end_week - release_start_week < 52:
- expansion = (52 - (release_end_week - release_start_week)) // 2
- if release_end_week + expansion < now:
- end_week += expansion
- else:
- end_week = now
- start_week = end_week - 52
-
- # empty stats for all weeks in range
- weeks = range(start_week, end_week)
- week_stat_loc = dict((c, 0) for c in weeks)
- week_stat_commits = dict((c, 0) for c in weeks)
- week_stat_commits_hl = dict((c, 0) for c in weeks)
-
- commits_handler = lambda record: 1
- if 'translations' in metric:
- commits_handler = lambda record: record.loc
-
- if ('commits' in metric) or ('loc' in metric):
- loc_handler = lambda record: record.loc
- else:
- loc_handler = lambda record: 0
-
- # fill stats with the data
- if 'person-day' in metric:
- # special case for man-day effort metric
- release_stat = collections.defaultdict(set)
- all_stat = collections.defaultdict(set)
- for record in records:
- if start_week <= record.week < end_week:
- day = utils.timestamp_to_day(record.date)
- user_id = record.user_id
- if record.release == release_name:
- release_stat[day].add(user_id)
- all_stat[day].add(user_id)
- for day, users in six.iteritems(release_stat):
- week = utils.timestamp_to_week(day * 24 * 3600)
- week_stat_commits_hl[week] += len(users)
- for day, users in six.iteritems(all_stat):
- week = utils.timestamp_to_week(day * 24 * 3600)
- week_stat_commits[week] += len(users)
- else:
- for record in records:
- week = record.week
- if start_week <= week < end_week:
- week_stat_loc[week] += loc_handler(record)
- week_stat_commits[week] += commits_handler(record)
- if 'members' in metric:
- if record.date >= start_date:
- week_stat_commits_hl[week] += 1
- else:
- if record.release == release_name:
- week_stat_commits_hl[week] += commits_handler(record)
-
- if 'all' == release_name and 'members' not in metric:
- week_stat_commits_hl = week_stat_commits
-
- # form arrays in format acceptable to timeline plugin
- array_loc = []
- array_commits = []
- array_commits_hl = []
-
- for week in weeks:
- week_str = utils.week_to_date(week)
- array_loc.append([week_str, week_stat_loc[week]])
- array_commits.append([week_str, week_stat_commits[week]])
- array_commits_hl.append([week_str, week_stat_commits_hl[week]])
-
- return [array_commits, array_commits_hl, array_loc]
-
-
-@app.template_test()
-def too_old(timestamp):
- age = CONF.age_warn
- now = time.time()
- return timestamp + age < now
-
-
-def main():
- logging.register_options(CONF)
- logging.set_defaults()
-
- conf_file = os.getenv('STACKALYTICS_CONF')
- if conf_file and os.path.isfile(conf_file):
- CONF(default_config_files=[conf_file])
- app.config['DEBUG'] = CONF.debug
- LOG.info('Stackalytics.dashboard is configured via "%s"', conf_file)
- else:
- CONF(project='stackalytics')
-
- logging.setup(CONF, 'stackalytics.dashboard')
-
- app.run(CONF.listen_host, CONF.listen_port)
-
-if __name__ == '__main__':
- main()
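
Most of the /api/1.0/stats endpoints above funnel through _get_aggregated_stats: bucket records by a key attribute, count them (or apply a metric filter), drop empty buckets, then sort descending by metric and number the rows. A condensed, self-contained sketch of that flow (the record shape is invented for illustration):

import collections

Record = collections.namedtuple('Record', ['company_name', 'author_name'])


def aggregate(records, key):
    result = collections.defaultdict(lambda: {'metric': 0})
    for rec in records:
        row = result[getattr(rec, key)]
        row['metric'] += 1
        row['id'] = getattr(rec, key)
    response = [r for r in result.values() if r['metric']]
    response.sort(key=lambda x: x['metric'], reverse=True)
    for i, row in enumerate(response, 1):  # stand-in for utils.add_index
        row['index'] = i
    return response


recs = [Record('Acme', 'a'), Record('Acme', 'b'), Record('Initech', 'c')]
print(aggregate(recs, 'company_name'))
# -> [{'metric': 2, 'id': 'Acme', 'index': 1},
#     {'metric': 1, 'id': 'Initech', 'index': 2}]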
diff --git a/stackalytics/dashboard/web.wsgi b/stackalytics/dashboard/web.wsgi
deleted file mode 100644
index d4fd174e5..000000000
--- a/stackalytics/dashboard/web.wsgi
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from stackalytics.dashboard.web import app as application
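
web.wsgi above is the usual WSGI shim: it only re-exports the Flask app under the name application that servers such as mod_wsgi look for. Assuming the package is installed, the same app could presumably also be served without the shim, e.g. with gunicorn 'stackalytics.dashboard.web:app' (command shown for illustration, not taken from the source).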
diff --git a/stackalytics/processor/__init__.py b/stackalytics/processor/__init__.py
deleted file mode 100644
index c9d84b54b..000000000
--- a/stackalytics/processor/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__author__ = 'ishakhat'
diff --git a/stackalytics/processor/bps.py b/stackalytics/processor/bps.py
deleted file mode 100644
index f6c07adcf..000000000
--- a/stackalytics/processor/bps.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from oslo_log import log as logging
-
-from stackalytics.processor import launchpad_utils
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-LINK_FIELDS = ['owner', 'assignee']
-BUG_FIELDS = ['web_link', 'status', 'title', 'importance']
-DATE_FIELDS = ['date_created', 'date_fix_committed', 'date_fix_released']
-
-
-def _get_bug_id(web_link):
- return web_link[web_link.rfind('/') + 1:]
-
-
-def _log_module(module, primary_module, modified_since):
- for record_draft in launchpad_utils.lp_bug_generator(module,
- modified_since):
-
- # record_draft can be a bug or bug target and
- # in the latter case it can be from a different module
- bug_target = record_draft['bug_target_name'].split('/')
- target_module = bug_target[0]
- if target_module != module:
- continue # ignore foreigners
-
- record = {}
-
- if len(bug_target) == 2:
- record['release'] = bug_target[1] # treat target as release
-
- for field in LINK_FIELDS:
- link = record_draft[field + '_link']
- if link:
- record[field] = launchpad_utils.link_to_launchpad_id(link)
-
- for field in BUG_FIELDS:
- record[field] = record_draft[field]
-
- for field in DATE_FIELDS:
- date = record_draft[field]
- if date:
- record[field] = utils.iso8601_to_timestamp(date)
-
- bug_id = _get_bug_id(record_draft['web_link'])
- record['module'] = primary_module
- record['id'] = utils.make_bug_id(bug_id, primary_module,
- record.get('release'))
-
- LOG.debug('New bug: %s', record)
- yield record
-
-
-def log(repo, modified_since):
- repo_module = repo['module']
- modules = [repo_module] + repo.get('aliases', [])
-
- for module in modules:
- if not launchpad_utils.lp_module_exists(module):
- LOG.debug('Module %s does not exist at Launchpad, skip it', module)
- continue
-
- LOG.debug('Retrieving list of bugs for module: %s', module)
-
- for record in _log_module(module, repo_module, modified_since):
- yield record
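
_get_bug_id above simply takes everything after the last slash of the bug's web link. For illustration (the URL is a made-up example, not from the source):

def _get_bug_id(web_link):
    return web_link[web_link.rfind('/') + 1:]


print(_get_bug_id('https://bugs.launchpad.net/nova/+bug/1234567'))  # 1234567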
diff --git a/stackalytics/processor/config.py b/stackalytics/processor/config.py
deleted file mode 100644
index 8f238c03d..000000000
--- a/stackalytics/processor/config.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-from oslo_config import cfg
-
-
-CONNECTION_OPTS = [
- cfg.StrOpt('runtime-storage-uri', default='memcached://127.0.0.1:11211',
- help='Storage URI'),
-]
-
-PROCESSOR_OPTS = [
- cfg.StrOpt('default-data-uri',
- default='https://git.openstack.org/cgit/'
- 'openstack/stackalytics/plain/etc/default_data.json',
- help='URI for default data. A local file can be used with the '
- 'prefix "file://". For example, '
- 'default_data_uri = file:///path/to/default_data.json'),
- cfg.StrOpt('sources-root', default='/var/local/stackalytics',
- help='The folder that holds all project sources to analyze'),
- cfg.IntOpt('days_to_update_members', default=30,
- help='Number of days to update members'),
- cfg.StrOpt('corrections-uri',
- default=('https://git.openstack.org/cgit/'
- 'openstack/stackalytics/plain/etc/corrections.json'),
- help='The address of file with corrections data'),
- cfg.StrOpt('review-uri', default='gerrit://review.openstack.org',
- help='URI of review system'),
- cfg.StrOpt('git-base-uri', default='git://git.openstack.org',
- help='git base location'),
- cfg.StrOpt('ssh-key-filename', default='/home/user/.ssh/id_rsa',
- help='SSH key for gerrit review system access'),
- cfg.StrOpt('ssh-username', default='user',
- help='SSH username for gerrit review system access'),
- cfg.StrOpt('translation-team-uri',
- default='https://git.openstack.org/cgit/openstack/i18n/'
- 'plain/tools/zanata/translation_team.yaml',
- help='URI of translation team data'),
- cfg.StrOpt("fetching-user-source", default='launchpad',
- choices=['launchpad', ''],
- help="Source for fetching user profiles"),
- cfg.IntOpt('members-look-ahead', default=250,
- help='How many member profiles to look ahead after the last'),
- cfg.IntOpt('read-timeout', default=120,
- help='Number of seconds to wait for remote response'),
- cfg.IntOpt('gerrit-retry', default=10,
- help='How many times to retry after Gerrit errors'),
-]
-
-
-def list_opts():
- yield (None, copy.deepcopy(CONNECTION_OPTS + PROCESSOR_OPTS))
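
One detail worth noting about the options above: oslo.config exposes dash-named options as underscore attributes. A minimal consumption sketch, assuming oslo.config and the stackalytics package are importable:

from oslo_config import cfg

from stackalytics.processor import config

CONF = cfg.CONF
CONF.register_opts(config.CONNECTION_OPTS + config.PROCESSOR_OPTS)
CONF(args=[])  # parse an empty command line, keeping the defaults

print(CONF.sources_root)         # 'sources-root' -> /var/local/stackalytics
print(CONF.runtime_storage_uri)  # memcached://127.0.0.1:11211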
diff --git a/stackalytics/processor/default_data_processor.py b/stackalytics/processor/default_data_processor.py
deleted file mode 100644
index 3c22d1722..000000000
--- a/stackalytics/processor/default_data_processor.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import hashlib
-import json
-import re
-
-from github import MainClass
-from oslo_config import cfg
-from oslo_log import log as logging
-import six
-
-from stackalytics.processor import normalizer
-from stackalytics.processor import rcs
-from stackalytics.processor import user_processor
-from stackalytics.processor import utils
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-GITHUB_URI_PREFIX = r'^github:\/\/'
-
-
-def _check_default_data_change(runtime_storage_inst, default_data):
- h = hashlib.new('sha1')
- h.update(json.dumps(default_data).encode('utf-8'))
- digest = h.hexdigest()
-
- p_digest = runtime_storage_inst.get_by_key('default_data_digest')
- if digest == p_digest:
- LOG.debug('No changes in default data, sha1: %s', digest)
- return False
-
- LOG.debug('Default data has changes, sha1: %s', digest)
- runtime_storage_inst.set_by_key('default_data_digest', digest)
- return True
-
-
-def _retrieve_project_list_from_sources(project_sources):
- for project_source in project_sources:
- uri = project_source.get('uri') or CONF.review_uri
- repo_iterator = []
- if re.search(rcs.GERRIT_URI_PREFIX, uri):
- repo_iterator = _retrieve_project_list_from_gerrit(project_source)
- elif re.search(GITHUB_URI_PREFIX, uri):
- repo_iterator = _retrieve_project_list_from_github(project_source)
-
- exclude = set(project_source.get('exclude', []))
- for repo in repo_iterator:
- if repo['module'] not in exclude:
- yield repo
-
-
-def _retrieve_project_list_from_gerrit(project_source):
- LOG.info('Retrieving project list from Gerrit')
- try:
- uri = project_source.get('uri') or CONF.review_uri
- gerrit_inst = rcs.Gerrit(uri)
- key_filename = (project_source.get('ssh_key_filename') or
- CONF.ssh_key_filename)
- username = project_source.get('ssh_username') or CONF.ssh_username
- gerrit_inst.setup(key_filename=key_filename, username=username)
-
- project_list = gerrit_inst.get_project_list()
- gerrit_inst.close()
- except rcs.RcsException:
- LOG.error('Failed to retrieve list of projects')
- raise
-
- organization = project_source['organization']
- LOG.debug('Get list of projects for organization %s', organization)
- git_repos = [f for f in project_list if f.startswith(organization + "/")]
-
- git_base_uri = project_source.get('git_base_uri') or CONF.git_base_uri
-
- for repo in git_repos:
- (org, name) = repo.split('/')
- repo_uri = '%(git_base_uri)s/%(repo)s.git' % dict(
- git_base_uri=git_base_uri, repo=repo)
- yield {
- 'branches': ['master'],
- 'module': name,
- 'organization': org,
- 'uri': repo_uri,
- 'releases': [],
- 'has_gerrit': True,
- }
-
-
-def _retrieve_project_list_from_github(project_source):
- LOG.info('Retrieving project list from GitHub')
- github = MainClass.Github(timeout=60)
-
- organization = project_source['organization']
- LOG.debug('Get list of projects for organization %s', organization)
- try:
- github_repos = github.get_organization(organization).get_repos()
- except Exception as e:
- LOG.error('Failed to retrieve list of projects from GitHub: %s',
- e, exc_info=True)
- raise
-
- for repo in github_repos:
- yield {
- 'branches': [project_source.get('default_branch', 'master')],
- 'module': repo.name.lower(),
- 'organization': organization,
- 'uri': repo.git_url,
- 'releases': []
- }
-
-
-def _create_module_groups_for_project_sources(project_sources, repos):
- organizations = collections.defaultdict(list)
- for repo in repos:
- organizations[repo['organization']].append(repo['module'])
-
- # organization -> (module_group_id, module_group_name)
- ps_organizations = dict(
- [(ps.get('organization'),
- (ps.get('module_group_id') or ps.get('organization'),
- ps.get('module_group_name') or ps.get('organization')))
- for ps in project_sources])
-
- module_groups = []
- for ogn, modules in six.iteritems(organizations):
- module_group_id = ogn
- module_group_name = ogn
-
- if ogn in ps_organizations:
- module_group_id = ps_organizations[ogn][0]
- module_group_name = ps_organizations[ogn][1]
-
- module_groups.append(utils.make_module_group(
- module_group_id, name=module_group_name, modules=modules,
- tag='organization'))
-
- return module_groups
-
-
-def _update_project_list(default_data):
-
- configured_repos = set([r['uri'] for r in default_data['repos']])
-
- repos = _retrieve_project_list_from_sources(
- default_data['project_sources'])
- if repos:
- # update pre-configured and exclude all projects start with 'deb-'
- repos_dict = dict((r['uri'], r) for r in repos
- if not r['module'].startswith('deb-'))
- for r in default_data['repos']:
- if r['uri'] in repos_dict:
- for k, v in repos_dict[r['uri']].items():
- if k not in r:
- r[k] = v
-
- # update default data
- default_data['repos'] += [r for r in repos_dict.values()
- if r['uri'] not in configured_repos]
-
- default_data['module_groups'] += _create_module_groups_for_project_sources(
- default_data['project_sources'], default_data['repos'])
-
-
-def _store_users(runtime_storage_inst, users):
- for user in users:
- stored_user = user_processor.load_user(runtime_storage_inst,
- user_id=user['user_id'])
- updated_user = user_processor.update_user_profile(stored_user, user)
- user_processor.store_user(runtime_storage_inst, updated_user)
-
-
-def _store_companies(runtime_storage_inst, companies):
- domains_index = {}
- for company in companies:
- for domain in company['domains']:
- domains_index[domain] = company['company_name']
-
- if 'aliases' in company:
- for alias in company['aliases']:
- normalized_alias = utils.normalize_company_name(alias)
- domains_index[normalized_alias] = company['company_name']
- normalized_company_name = utils.normalize_company_name(
- company['company_name'])
- domains_index[normalized_company_name] = company['company_name']
-
- runtime_storage_inst.set_by_key('companies', domains_index)
-
-
-def _store_module_groups(runtime_storage_inst, module_groups):
- stored_mg = runtime_storage_inst.get_by_key('module_groups') or {}
- for mg in module_groups:
- name = mg['module_group_name']
- module_group_id = mg.get('id') or name
- stored_mg[module_group_id] = utils.make_module_group(
- module_group_id, name=name, modules=mg['modules'],
- tag=mg.get('tag', 'group'))
- runtime_storage_inst.set_by_key('module_groups', stored_mg)
-
-
-STORE_FUNCS = {
- 'users': _store_users,
- 'companies': _store_companies,
- 'module_groups': _store_module_groups,
-}
-
-
-def _store_default_data(runtime_storage_inst, default_data):
- normalizer.normalize_default_data(default_data)
-
- LOG.debug('Update runtime storage with default data')
- for key, value in six.iteritems(default_data):
- if key in STORE_FUNCS:
- STORE_FUNCS[key](runtime_storage_inst, value)
- else:
- runtime_storage_inst.set_by_key(key, value)
-
-
-def process(runtime_storage_inst, default_data):
- LOG.debug('Process default data')
-
- if 'project_sources' in default_data:
- _update_project_list(default_data)
-
- _store_default_data(runtime_storage_inst, default_data)
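
_check_default_data_change above is a plain content-hash guard: serialize the payload, hash it, and compare against the digest stored on the previous run. The same pattern in isolation (a dict stands in for runtime storage; sort_keys is added here to make the digest independent of dict ordering):

import hashlib
import json

storage = {}  # stand-in for runtime_storage_inst


def has_changed(payload):
    digest = hashlib.sha1(
        json.dumps(payload, sort_keys=True).encode('utf-8')).hexdigest()
    if storage.get('digest') == digest:
        return False
    storage['digest'] = digest
    return True


print(has_changed({'repos': []}))  # True  (first run)
print(has_changed({'repos': []}))  # False (unchanged)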
diff --git a/stackalytics/processor/dump.py b/stackalytics/processor/dump.py
deleted file mode 100644
index 3b1a38918..000000000
--- a/stackalytics/processor/dump.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pickle
-import re
-import sys
-
-import memcache
-from oslo_config import cfg
-from oslo_log import log as logging
-import six
-
-from stackalytics.processor import config
-from stackalytics.processor import utils
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-OPTS = [
- cfg.BoolOpt('restore',
- short='r',
- help='Restore data into memcached'),
- cfg.StrOpt('file',
- short='f',
- help='The name of file to store data'),
- cfg.IntOpt('min-compress-len', default=0,
- short='m',
- help='The threshold length to kick in auto-compression'),
-]
-
-
-SINGLE_KEYS = ['module_groups', 'project_types', 'repos', 'releases',
- 'companies', 'last_update_members_date', 'last_member_index',
- 'runtime_storage_update_time']
-ARRAY_KEYS = ['record', 'user']
-BULK_READ_SIZE = 64
-MEMCACHED_URI_PREFIX = r'^memcached:\/\/'
-
-
-def read_records_from_fd(fd):
- while True:
- try:
- record = pickle.load(fd)
- except EOFError:
- break
- yield record
-
-
-def store_bucket(memcached_inst, bucket):
- LOG.debug('Store bucket of records into memcached')
- res = memcached_inst.set_multi(bucket,
- min_compress_len=CONF.min_compress_len)
- if res:
- LOG.critical('Failed to set values in memcached: %s', res)
- raise Exception('memcached set_multi operation is failed')
-
-
-def import_data(memcached_inst, fd):
- LOG.info('Importing data into memcached')
- bucket = {}
- for key, value in read_records_from_fd(fd):
- LOG.debug('Reading record key %s, value %s', key, value)
- if len(bucket) == BULK_READ_SIZE:
- store_bucket(memcached_inst, bucket)
- bucket = {}
- bucket[key] = value
- if bucket:
- store_bucket(memcached_inst, bucket)
-
-
-def get_repo_keys(memcached_inst):
- for repo in (memcached_inst.get('repos') or []):
- uri = repo['uri']
- quoted_uri = six.moves.urllib.parse.quote_plus(uri)
-
- yield 'bug_modified_since-%s' % repo['module']
-
- branches = {repo.get('default_branch', 'master')}
- for release in repo.get('releases'):
- if 'branch' in release:
- branches.add(release['branch'])
-
- for branch in branches:
- yield 'vcs:%s:%s' % (quoted_uri, branch)
- yield 'rcs:%s:%s' % (quoted_uri, branch)
-
-
-def export_data(memcached_inst, fd):
- LOG.info('Exporting data from memcached')
-
- for key in SINGLE_KEYS:
- pickle.dump((key, memcached_inst.get(key)), fd)
-
- for key in get_repo_keys(memcached_inst):
- pickle.dump((key, memcached_inst.get(key)), fd)
-
- for key in ARRAY_KEYS:
- key_count = key + ':count'
- count = memcached_inst.get(key_count) or 0
- pickle.dump((key_count, memcached_inst.get(key_count)), fd)
-
- key_prefix = key + ':'
-
- for record_id_set in utils.make_range(0, count + 1, BULK_READ_SIZE):
- # memcache limits the size of returned data to a specific yet unknown
- # chunk size; the code should verify that all requested records are
- # returned and be able to fall back to one-by-one retrieval
-
- chunk = memcached_inst.get_multi(record_id_set, key_prefix)
- if len(chunk) < len(record_id_set):
- # retrieve one-by-one
- for record_id in record_id_set:
- key = key_prefix + str(record_id)
- pickle.dump((key, memcached_inst.get(key)), fd)
- else:
- # dump the whole chunk
- for k, v in six.iteritems(chunk):
- pickle.dump((key_prefix + str(k), v), fd)
-
- for user_seq in range((memcached_inst.get('user:count') or 0) + 1):
- user = memcached_inst.get('user:%s' % user_seq)
- if user:
- if user.get('user_id'):
- pickle.dump((('user:%s' % user['user_id']).encode('utf8'),
- user), fd)
- if user.get('launchpad_id'):
- pickle.dump(('user:%s' % user['launchpad_id'], user), fd)
- if user.get('gerrit_id'):
- pickle.dump(('user:gerrit:%s' % user['gerrit_id'], user), fd)
- if user.get('member_id'):
- pickle.dump(('user:member:%s' % user['member_id'], user), fd)
- for email in user.get('emails') or []:
- pickle.dump((('user:%s' % email).encode('utf8'), user), fd)
-
-
-def _connect_to_memcached(uri):
- stripped = re.sub(MEMCACHED_URI_PREFIX, '', uri)
- if stripped:
- storage_uri = stripped.split(',')
- return memcache.Client(storage_uri)
- else:
- raise Exception('Invalid storage uri %s' % uri)
-
-
-def main():
- utils.init_config_and_logging(config.CONNECTION_OPTS + OPTS)
-
- memcached_inst = _connect_to_memcached(CONF.runtime_storage_uri)
-
- filename = CONF.file
-
- if CONF.restore:
- if filename:
- fd = open(filename, 'r')
- else:
- fd = sys.stdin
- import_data(memcached_inst, fd)
- else:
- if filename:
- fd = open(filename, 'w')
- else:
- fd = sys.stdout
- export_data(memcached_inst, fd)
-
-
-if __name__ == '__main__':
- main()
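
The export path above reads records in buckets of BULK_READ_SIZE and, per its in-code comment, falls back to one-by-one gets whenever get_multi returns fewer items than requested. A self-contained sketch of that fallback, with a small fake client standing in for memcache.Client:

class FakeClient(object):
    # Stand-in for memcache.Client that silently drops half of a multi-get.
    def __init__(self, data):
        self.data = data

    def get(self, key):
        return self.data.get(key)

    def get_multi(self, ids, key_prefix):
        return dict((i, self.data[key_prefix + str(i)])
                    for i in list(ids)[:len(ids) // 2])


def read_chunk(client, ids, key_prefix):
    chunk = client.get_multi(ids, key_prefix)
    if len(chunk) < len(ids):  # incomplete answer: retrieve one-by-one
        return dict((i, client.get(key_prefix + str(i))) for i in ids)
    return chunk


client = FakeClient(dict(('record:%d' % i, 'r%d' % i) for i in range(4)))
print(read_chunk(client, [0, 1, 2, 3], 'record:'))  # all four records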
diff --git a/stackalytics/processor/governance.py b/stackalytics/processor/governance.py
deleted file mode 100644
index 37f5aef7f..000000000
--- a/stackalytics/processor/governance.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-
-from oslo_log import log as logging
-import six
-import yaml
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-# list of supported tags
-TAGS = ['tc:approved-release', 'type:service', 'type:library']
-
-
-def _make_module_group(module_groups, name):
- m = module_groups[name] # object created by defaultdict
- m['tag'] = 'project_type'
- m['module_group_name'] = name
- m['releases'] = collections.defaultdict(set)
- return m
-
-
-def read_legacy_programs_yaml(module_groups, release_name, content):
- all_official = module_groups['openstack-official']
-
- for name, info in six.iteritems(content):
- for module in info['projects']:
- mn = module['repo'].split('/')[1] # module_name
-
- # module_groups[group_id]['releases'][release_name].append(mn)
- all_official['releases'][release_name].add(mn)
-
-
-def read_early_big_tent_projects_yaml(module_groups, release_name, content):
- all_official = module_groups['openstack-official']
-
- for name, info in six.iteritems(content):
- for module in info['projects']:
- repo_split = module['repo'].split('/')
- if len(repo_split) < 2:
- continue # valid repo must be in form of 'org/module'
- mn = repo_split[1]
-
- # module_groups[group_id]['releases'][release_name].append(mn)
- all_official['releases'][release_name].add(mn)
-
-
-def read_big_tent_projects_yaml(module_groups, release_name, content):
- all_official = module_groups['openstack-official']
-
- for name, project in six.iteritems(content):
- group_id = '%s-group' % name.lower()
- module_groups[group_id]['module_group_name'] = (
- '%s Official' % name.title())
- module_groups[group_id]['tag'] = 'program'
-
- for d_name, deliverable in six.iteritems(project['deliverables']):
- for repo in deliverable['repos']:
- repo_split = repo.split('/')
- if len(repo_split) < 2:
- continue # valid repo must be in form of 'org/module'
-
- mn = repo_split[1] # module_name
-
- module_groups[group_id]['modules'].add(mn)
- all_official['releases'][release_name].add(mn)
-
- tags = deliverable.get('tags', [])
- for tag in tags:
- if tag in TAGS:
- module_groups[tag]['releases'][release_name].add(mn)
-
-
-def _make_default_module_groups():
- # create default module groups
- module_groups = collections.defaultdict(lambda: {'modules': set()})
-
- # openstack official
- _make_module_group(module_groups, 'openstack-official')
-
- # openstack others
- _make_module_group(module_groups, 'openstack-others')
-
- # tags
- for tag in TAGS:
- _make_module_group(module_groups, tag)
-
- return module_groups
-
-
-GOVERNANCE_PROCESSORS = {
- 'legacy': read_legacy_programs_yaml,
- 'early_big_tent': read_early_big_tent_projects_yaml,
- 'big_tent': read_big_tent_projects_yaml,
-}
-
-
-def process_official_list(releases):
- module_groups = _make_default_module_groups()
- releases_with_refs = (r for r in releases if r.get('refs'))
-
- for release in releases_with_refs:
- ref_governance = release['refs'].get('governance')
- if not ref_governance:
- continue
-
- gov_type = ref_governance['type']
- gov_source = ref_governance['source']
- release_name = release['release_name'].lower()
-
- LOG.debug('Process governance content from uri: %s', gov_source)
- content = yaml.safe_load(utils.read_uri(gov_source))
-
- GOVERNANCE_PROCESSORS[gov_type](module_groups, release_name, content)
-
- # set ids for module groups
- for group_id, group in six.iteritems(module_groups):
- group['id'] = group_id
-
- return module_groups
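The governance processors above all write into the same defaultdict-of-groups structure. A small self-contained illustration of that shape; group, release, and module names here are examples:

```python
import collections

# Illustrative shape of the module_groups structure populated above.
module_groups = collections.defaultdict(lambda: {'modules': set()})

official = module_groups['openstack-official']  # created on first access
official['tag'] = 'project_type'
official['module_group_name'] = 'openstack-official'
official['releases'] = collections.defaultdict(set)

official['releases']['ocata'].add('nova')
official['releases']['ocata'].add('neutron')

assert module_groups['openstack-official']['releases']['ocata'] == {
    'nova', 'neutron'}
```

Because the defaultdict factory only supplies a 'modules' set, groups that need per-release sets must be initialized explicitly, which is what _make_module_group does.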
diff --git a/stackalytics/processor/launchpad_utils.py b/stackalytics/processor/launchpad_utils.py
deleted file mode 100644
index 89b676899..000000000
--- a/stackalytics/processor/launchpad_utils.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from oslo_log import log as logging
-import requests
-import six
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-BUG_STATUSES = ['New', 'Incomplete', 'Opinion', 'Invalid', 'Won\'t Fix',
- 'Expired', 'Confirmed', 'Triaged', 'In Progress',
- 'Fix Committed', 'Fix Released',
- 'Incomplete (with response)',
- 'Incomplete (without response)']
-LP_URI_V1 = 'https://api.launchpad.net/1.0/%s'
-LP_URI_DEVEL = 'https://api.launchpad.net/devel/%s'
-
-launchpad_session = requests.Session()
-
-
-def link_to_launchpad_id(link):
- return link[link.find('~') + 1:]
-
-
-def _lp_profile_by_launchpad_id(launchpad_id):
- LOG.debug('Lookup user id %s at Launchpad', launchpad_id)
- uri = LP_URI_V1 % ('~' + launchpad_id)
- lp_profile = utils.read_json_from_uri(uri, session=launchpad_session)
- utils.validate_lp_display_name(lp_profile)
- return lp_profile
-
-
-def query_lp_user_name(launchpad_id):
- """Query user name by Launchpad ID
-
- :param launchpad_id: user's launchpad id
- :return: user name
- """
- if not launchpad_id:
- return None
-
- lp_profile = _lp_profile_by_launchpad_id(launchpad_id)
-
- if not lp_profile:
- LOG.debug('User with id %s not found', launchpad_id)
- return launchpad_id
-
- return lp_profile['display_name']
-
-
-def _lp_profile_by_email(email):
- LOG.debug('Lookup user email %s at Launchpad', email)
- uri = LP_URI_V1 % ('people/?ws.op=getByEmail&email=' + email)
- lp_profile = utils.read_json_from_uri(uri, session=launchpad_session)
- utils.validate_lp_display_name(lp_profile)
- return lp_profile
-
-
-def query_lp_info(email):
- """Query Launchpad ID and user name by email
-
- :param email: user email
- :return: tuple (launchpad id, name)
- """
- lp_profile = None
- if not utils.check_email_validity(email):
- LOG.debug('User email is not valid %s', email)
- else:
- lp_profile = _lp_profile_by_email(email)
-
- if not lp_profile:
- LOG.debug('User with email %s not found', email)
- return None, None
-
- LOG.debug('Email %(email)s is mapped to launchpad user %(lp)s',
- {'email': email, 'lp': lp_profile['name']})
- return lp_profile['name'], lp_profile['display_name']
-
-
-def lp_module_exists(module):
- uri = LP_URI_DEVEL % module
- request = utils.do_request(uri)
-
- LOG.debug('Checked uri: %(uri)s, status: %(status)s',
- {'uri': uri, 'status': request.status_code})
- return request.status_code != 404
-
-
-def lp_blueprint_generator(module):
- uri = LP_URI_DEVEL % (module + '/all_specifications')
- while uri:
- LOG.debug('Reading chunk from uri %s', uri)
- chunk = utils.read_json_from_uri(uri, session=launchpad_session)
-
- if not chunk:
- LOG.warning('No data was read from uri %s', uri)
- break
-
- for record in chunk['entries']:
- yield record
-
- uri = chunk.get('next_collection_link')
-
-
-def lp_bug_generator(module, modified_since):
- uri = LP_URI_DEVEL % (module + '?ws.op=searchTasks')
- for status in BUG_STATUSES:
- uri += '&status=' + six.moves.urllib.parse.quote_plus(status)
- if modified_since:
- uri += '&modified_since=' + utils.timestamp_to_utc_date(modified_since)
-
- while uri:
- LOG.debug('Reading chunk from uri %s', uri)
- chunk = utils.read_json_from_uri(uri, session=launchpad_session)
-
- if not chunk:
- LOG.warning('No data was read from uri %s', uri)
- break
-
- for record in chunk['entries']:
- yield record
-
- related_tasks_uri = record['related_tasks_collection_link']
- LOG.debug('Reading related task from uri %s', related_tasks_uri)
- related_tasks = utils.read_json_from_uri(related_tasks_uri,
- session=launchpad_session)
- if not related_tasks:
- LOG.warning('No data was read from uri %s', related_tasks_uri)
- elif related_tasks['entries']:
- for related_task in related_tasks['entries']:
- yield related_task
-
- uri = chunk.get('next_collection_link')
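Both generators above follow the same Launchpad collection-paging idiom: fetch a chunk, yield its entries, then follow 'next_collection_link' until the API stops returning one. A minimal standalone version, where fetch_json stands in for utils.read_json_from_uri and is an assumption of this sketch:

```python
def iterate_collection(uri, fetch_json):
    while uri:
        chunk = fetch_json(uri)
        if not chunk:
            break  # treat an unreadable chunk as the end of the collection
        for entry in chunk.get('entries', []):
            yield entry
        uri = chunk.get('next_collection_link')
```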
diff --git a/stackalytics/processor/lp.py b/stackalytics/processor/lp.py
deleted file mode 100644
index f35f6f52e..000000000
--- a/stackalytics/processor/lp.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from oslo_log import log as logging
-
-from stackalytics.processor import launchpad_utils
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-LINK_FIELDS = ['owner', 'drafter', 'starter', 'completer',
- 'assignee', 'approver']
-DATE_FIELDS = ['date_created', 'date_completed', 'date_started']
-
-
-def _log_module(module, primary_module):
- for record in launchpad_utils.lp_blueprint_generator(module):
- for field in LINK_FIELDS:
- link = record[field + '_link']
- if link:
- record[field] = launchpad_utils.link_to_launchpad_id(link)
- del record[field + '_link']
- for field in DATE_FIELDS:
- date = record[field]
- if date:
- record[field] = utils.iso8601_to_timestamp(date)
-
- record['module'] = primary_module
- record['id'] = utils.get_blueprint_id(primary_module, record['name'])
-
- LOG.debug('New blueprint: %s', record)
- yield record
-
-
-def log(repo):
- repo_module = repo['module']
- modules = [repo_module] + repo.get('aliases', [])
-
- for module in modules:
- if not launchpad_utils.lp_module_exists(module):
- LOG.debug('Module %s does not exist at Launchpad, skip it', module)
- return
-
- LOG.debug('Retrieving list of blueprints for module: %s', module)
- for record in _log_module(module, repo_module):
- yield record
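The '*_link' flattening in _log_module reduces Launchpad resource URLs to bare account ids. A hedged sketch of just that transformation; flatten_links is a hypothetical helper covering only the happy path:

```python
def flatten_links(record, link_fields=('owner', 'assignee')):
    # a Launchpad resource URL collapses to the trailing account id
    for field in link_fields:
        link = record.pop(field + '_link', None)
        if link:
            record[field] = link[link.find('~') + 1:]
    return record

print(flatten_links(
    {'owner_link': 'https://api.launchpad.net/devel/~jdoe'}))
# -> {'owner': 'jdoe'}
```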
diff --git a/stackalytics/processor/main.py b/stackalytics/processor/main.py
deleted file mode 100644
index 077e422a0..000000000
--- a/stackalytics/processor/main.py
+++ /dev/null
@@ -1,338 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-import jsonschema
-from oslo_config import cfg
-from oslo_log import log as logging
-import psutil
-import six
-
-from stackalytics.processor import bps
-from stackalytics.processor import config
-from stackalytics.processor import default_data_processor
-from stackalytics.processor import governance
-from stackalytics.processor import lp
-from stackalytics.processor import mls
-from stackalytics.processor import mps
-from stackalytics.processor import rcs
-from stackalytics.processor import record_processor
-from stackalytics.processor import runtime_storage
-from stackalytics.processor import schema
-from stackalytics.processor import utils
-from stackalytics.processor import vcs
-from stackalytics.processor import zanata
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-def get_pids():
- result = set([])
- for pid in psutil.pids():
- try:
- p = psutil.Process(pid)
- name = p.name()
- if name == 'uwsgi':
- LOG.debug('Found uwsgi process, pid: %s', pid)
- result.add(pid)
- except Exception as e:
- LOG.debug('Exception while iterating process list: %s', e)
- pass
-
- return result
-
-
-def update_pids(runtime_storage):
- pids = get_pids()
- if not pids:
- return
- runtime_storage.active_pids(pids)
-
-
-def _merge_commits(original, new):
- if new['branches'] < original['branches']:
- return False
- else:
- original['branches'] |= new['branches']
- return True
-
-
-def _record_typer(record_iterator, record_type):
- for record in record_iterator:
- record['record_type'] = record_type
- yield record
-
-
-def _get_repo_branches(repo):
- return ({repo.get('default_branch', 'master')} |
- set(r['branch'] for r in repo.get('releases', [])
- if 'branch' in r))
-
-
-def _process_repo_blueprints(repo, runtime_storage_inst,
- record_processor_inst):
- LOG.info('Processing blueprints for repo: %s', repo['uri'])
-
- bp_iterator = lp.log(repo)
- bp_iterator_typed = _record_typer(bp_iterator, 'bp')
- processed_bp_iterator = record_processor_inst.process(bp_iterator_typed)
-
- runtime_storage_inst.set_records(processed_bp_iterator,
- utils.merge_records)
-
-
-def _process_repo_bugs(repo, runtime_storage_inst, record_processor_inst):
- LOG.info('Processing bugs for repo: %s', repo['uri'])
-
- current_date = utils.date_to_timestamp('now')
- bug_modified_since = runtime_storage_inst.get_by_key(
- 'bug_modified_since-%s' % repo['module'])
-
- bug_iterator = bps.log(repo, bug_modified_since)
- bug_iterator_typed = _record_typer(bug_iterator, 'bug')
- processed_bug_iterator = record_processor_inst.process(bug_iterator_typed)
-
- runtime_storage_inst.set_records(processed_bug_iterator,
- utils.merge_records)
- runtime_storage_inst.set_by_key('bug_modified_since-%s' % repo['module'],
- current_date)
-
-
-def _process_repo_reviews(repo, runtime_storage_inst, record_processor_inst,
- rcs_inst):
- for branch in _get_repo_branches(repo):
- LOG.info('Processing reviews for repo: %s, branch: %s',
- repo['uri'], branch)
-
- quoted_uri = six.moves.urllib.parse.quote_plus(repo['uri'])
- rcs_key = 'rcs:%s:%s' % (quoted_uri, branch)
- last_retrieval_time = runtime_storage_inst.get_by_key(rcs_key)
- current_retrieval_time = utils.date_to_timestamp('now')
-
- review_iterator = itertools.chain(
- rcs_inst.log(repo, branch, last_retrieval_time, status='open'),
- rcs_inst.log(repo, branch, last_retrieval_time, status='merged'),
- rcs_inst.log(repo, branch, last_retrieval_time, status='abandoned',
- grab_comments=True), )
-
- review_iterator_typed = _record_typer(review_iterator, 'review')
- processed_review_iterator = record_processor_inst.process(
- review_iterator_typed)
-
- runtime_storage_inst.set_records(processed_review_iterator,
- utils.merge_records)
- runtime_storage_inst.set_by_key(rcs_key, current_retrieval_time)
-
-
-def _process_repo_vcs(repo, runtime_storage_inst, record_processor_inst):
- vcs_inst = vcs.get_vcs(repo, CONF.sources_root)
- vcs_inst.fetch()
-
- for branch in _get_repo_branches(repo):
- LOG.info('Processing commits in repo: %s, branch: %s',
- repo['uri'], branch)
-
- quoted_uri = six.moves.urllib.parse.quote_plus(repo['uri'])
- vcs_key = 'vcs:%s:%s' % (quoted_uri, branch)
- last_id = runtime_storage_inst.get_by_key(vcs_key)
-
- commit_iterator = vcs_inst.log(branch, last_id)
- commit_iterator_typed = _record_typer(commit_iterator, 'commit')
- processed_commit_iterator = record_processor_inst.process(
- commit_iterator_typed)
- runtime_storage_inst.set_records(
- processed_commit_iterator, _merge_commits)
-
- last_id = vcs_inst.get_last_id(branch)
- runtime_storage_inst.set_by_key(vcs_key, last_id)
-
-
-def _process_repo(repo, runtime_storage_inst, record_processor_inst,
- rcs_inst):
- LOG.info('Processing repo: %s', repo['uri'])
-
- _process_repo_vcs(repo, runtime_storage_inst, record_processor_inst)
-
- _process_repo_bugs(repo, runtime_storage_inst, record_processor_inst)
-
- _process_repo_blueprints(repo, runtime_storage_inst, record_processor_inst)
-
- if 'has_gerrit' in repo:
- _process_repo_reviews(repo, runtime_storage_inst,
- record_processor_inst, rcs_inst)
-
-
-def _process_mail_list(uri, runtime_storage_inst, record_processor_inst):
- mail_iterator = mls.log(uri, runtime_storage_inst)
- mail_iterator_typed = _record_typer(mail_iterator, 'email')
- processed_mail_iterator = record_processor_inst.process(
- mail_iterator_typed)
- runtime_storage_inst.set_records(processed_mail_iterator)
-
-
-def _process_translation_stats(runtime_storage_inst, record_processor_inst):
- translation_iterator = zanata.log(runtime_storage_inst,
- CONF.translation_team_uri)
- translation_iterator_typed = _record_typer(translation_iterator, 'i18n')
- processed_translation_iterator = record_processor_inst.process(
- translation_iterator_typed)
- runtime_storage_inst.set_records(processed_translation_iterator)
-
-
-def _process_member_list(uri, runtime_storage_inst, record_processor_inst):
- member_iterator = mps.log(uri, runtime_storage_inst,
- CONF.days_to_update_members,
- CONF.members_look_ahead)
- member_iterator_typed = _record_typer(member_iterator, 'member')
- processed_member_iterator = record_processor_inst.process(
- member_iterator_typed)
- runtime_storage_inst.set_records(processed_member_iterator)
-
-
-def update_members(runtime_storage_inst, record_processor_inst):
- member_lists = runtime_storage_inst.get_by_key('member_lists') or []
- for member_list in member_lists:
- _process_member_list(member_list, runtime_storage_inst,
- record_processor_inst)
-
-
-def _post_process_records(record_processor_inst, repos):
- LOG.debug('Build release index')
- release_index = {}
- for repo in repos:
- vcs_inst = vcs.get_vcs(repo, CONF.sources_root)
- release_index.update(vcs_inst.fetch())
-
- LOG.debug('Post-process all records')
- record_processor_inst.post_processing(release_index)
-
-
-def process(runtime_storage_inst, record_processor_inst):
- repos = utils.load_repos(runtime_storage_inst)
-
- rcs_inst = rcs.get_rcs(CONF.review_uri)
- rcs_inst.setup(key_filename=CONF.ssh_key_filename,
- username=CONF.ssh_username,
- gerrit_retry=CONF.gerrit_retry)
-
- for repo in repos:
- _process_repo(repo, runtime_storage_inst, record_processor_inst,
- rcs_inst)
-
- rcs_inst.close()
-
- LOG.info('Processing mail lists')
- mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
- for mail_list in mail_lists:
- _process_mail_list(mail_list, runtime_storage_inst,
- record_processor_inst)
-
- LOG.info('Processing translations stats')
- _process_translation_stats(runtime_storage_inst, record_processor_inst)
-
- _post_process_records(record_processor_inst, repos)
-
-
-def apply_corrections(uri, runtime_storage_inst):
- LOG.info('Applying corrections from uri %s', uri)
- corrections = utils.read_json_from_uri(uri)
- if not corrections:
- LOG.error('Unable to read corrections from uri: %s', uri)
- return
-
- valid_corrections = []
- for c in corrections['corrections']:
- if 'primary_key' in c:
- valid_corrections.append(c)
- else:
- LOG.warning('Correction is missing primary key: %s', c)
- runtime_storage_inst.apply_corrections(valid_corrections)
-
-
-def process_project_list(runtime_storage_inst):
- module_groups = runtime_storage_inst.get_by_key('module_groups') or {}
- releases = runtime_storage_inst.get_by_key('releases') or {}
-
- official_module_groups = governance.process_official_list(releases)
-
- LOG.debug('Update module groups with official: %s', official_module_groups)
- module_groups.update(official_module_groups)
-
- # make list of OpenStack unofficial projects
- others = module_groups.get('openstack-others')
- off_rm = module_groups.get('openstack-official', {}).get('releases')
- official = dict((r, set(m)) for r, m in six.iteritems(off_rm))
-
- for module in module_groups.get('openstack', {}).get('modules', []):
- for r, off_m in six.iteritems(official):
- if module not in off_m:
- others['releases'][r].add(module)
-
- # register modules as module groups
- repos = runtime_storage_inst.get_by_key('repos') or []
- for repo in repos:
- module = repo['module'].lower()
- module_groups[module] = utils.make_module_group(module, tag='module')
-
- # register module 'unknown' - used for emails not mapped to any module
- module_groups['unknown'] = utils.make_module_group('unknown', tag='module')
-
- runtime_storage_inst.set_by_key('module_groups', module_groups)
-
-
-def main():
- utils.init_config_and_logging(config.CONNECTION_OPTS +
- config.PROCESSOR_OPTS)
-
- runtime_storage_inst = runtime_storage.get_runtime_storage(
- CONF.runtime_storage_uri)
-
- default_data = utils.read_json_from_uri(CONF.default_data_uri)
- if not default_data:
- LOG.critical('Unable to load default data')
- return not 0
-
- try:
- jsonschema.validate(default_data, schema.default_data)
- except jsonschema.ValidationError as e:
- LOG.critical('The default data is invalid: %s' % e)
- return not 0
-
- default_data_processor.process(runtime_storage_inst,
- default_data)
-
- process_project_list(runtime_storage_inst)
-
- update_pids(runtime_storage_inst)
-
- record_processor_inst = record_processor.RecordProcessor(
- runtime_storage_inst)
-
- process(runtime_storage_inst, record_processor_inst)
-
- apply_corrections(CONF.corrections_uri, runtime_storage_inst)
-
- # the long-running operation should be the last step
- update_members(runtime_storage_inst, record_processor_inst)
-
- runtime_storage_inst.set_by_key('runtime_storage_update_time',
- utils.date_to_timestamp('now'))
- LOG.info('stackalytics-processor succeeded.')
-
-
-if __name__ == '__main__':
- main()
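The processing functions above share one shape: a source generator is tagged by _record_typer, streamed through the record processor, and handed to storage, so no stage materializes the full record set. A toy model of that chain; the process() stage here merely marks records and stands in for RecordProcessor.process:

```python
def record_typer(records, record_type):
    # tag each record with its type, lazily
    for record in records:
        record['record_type'] = record_type
        yield record

def process(records):
    # stand-in for RecordProcessor.process
    for record in records:
        record['processed'] = True
        yield record

source = iter([{'commit_id': 'a1'}, {'commit_id': 'b2'}])
for rec in process(record_typer(source, 'commit')):
    print(rec['record_type'], rec['commit_id'])  # commit a1 / commit b2
```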
diff --git a/stackalytics/processor/mls.py b/stackalytics/processor/mls.py
deleted file mode 100644
index 54d13e432..000000000
--- a/stackalytics/processor/mls.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from email import utils as email_utils
-import re
-
-from oslo_log import log as logging
-import six
-from six.moves.urllib import parse
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-EMAIL_HEADER_PATTERN = ('From \S+(?: at \S+)?\s+'
- '\w{3}\s+\w{3}\s+\d{1,2}\s+\d{2}:\d{2}(?::\d{2})?'
- '(?:\s+\S+)?\s+\d{4}.*?\n')
-
-MAIL_BOX_PATTERN = re.compile(
- '^' + EMAIL_HEADER_PATTERN +
- 'From: (?P<author_email>\S+(?: at \S+))'
- '(?:\W+(?P<author_name>\w+(?:\s\w+)*))?.*?\n'
- 'Date: (?P<date>.*?)\n'
- 'Subject: (?P<subject>.*?)(?=\n\S+:)'
- '.*?Message-ID: (?P<message_id>\S+)\n'
- '\n(?P<body>.*?)\n'
- '(?=' + EMAIL_HEADER_PATTERN + 'From: )',
- flags=re.MULTILINE | re.DOTALL)
-
-MESSAGE_PATTERNS = {
- 'bug_id': re.compile(r'https://bugs.launchpad.net/bugs/(?P<id>\d+)',
- re.IGNORECASE),
- 'blueprint_id': re.compile(r'https://blueprints.launchpad.net/'
- r'(?P<module>[^\/]+)/\+spec/(?P<id>[a-z0-9-]+)',
- re.IGNORECASE),
-}
-
-TRAILING_RECORD = ('From ishakhat at mirantis.com Tue Sep 17 07:30:43 2013\n'
- 'From: ')
-
-
-def _get_mail_archive_links(uri):
- content = utils.read_uri(uri)
- if not content:
- LOG.warning('Mail archive list is not found at %s', uri)
- return []
-
- links = set(re.findall(r'\shref\s*=\s*[\'"]([^\'"]*\.txt\.gz)', content,
- flags=re.IGNORECASE))
- return [parse.urljoin(uri, link) for link in links]
-
-
-def _uri_content_changed(uri, runtime_storage_inst):
- LOG.debug('Check changes for mail archive located at: %s', uri)
- last_modified = utils.get_uri_last_modified(uri)
-
- if last_modified != runtime_storage_inst.get_by_key('mail_link:' + uri):
- LOG.debug('Mail archive changed, last modified at: %s', last_modified)
- runtime_storage_inst.set_by_key('mail_link:' + uri, last_modified)
- return True
-
- return False
-
-
-def _optimize_body(email_body):
- result = []
- for line in email_body.split('\n'):
- line = line.strip()
-
- if line[:1] == '>' or line[:8] == '--------':
- continue # ignore replies and part delimiters
-
- if (not result) or (result and result[-1] != line):
- result.append(line)
-
- return '\n'.join(result)
-
-
-def _retrieve_mails(uri):
- LOG.debug('Retrieving mail archive from: %s', uri)
- content = utils.read_gzip_from_uri(uri)
- if not content:
- LOG.error('Error reading mail archive from: %s', uri)
- return
-
- LOG.debug('Mail archive is loaded, start processing')
-
- content += TRAILING_RECORD
-
- for rec in re.finditer(MAIL_BOX_PATTERN, content):
- email = rec.groupdict()
- email['author_email'] = email['author_email'].replace(' at ', '@', 1)
- if not utils.check_email_validity(email['author_email']):
- continue
-
- email['date'] = int(email_utils.mktime_tz(
- email_utils.parsedate_tz(email['date'])))
-
- email['body'] = _optimize_body(email['body'])
-
- for pattern_name, pattern in six.iteritems(MESSAGE_PATTERNS):
- collection = set()
- for item in re.finditer(pattern, email['body']):
- groups = item.groupdict()
- item_id = groups['id']
- if 'module' in groups:
- item_id = groups['module'] + ':' + item_id
- email['module'] = groups['module']
- collection.add(item_id)
- email[pattern_name] = list(collection)
-
- yield email
-
-
-def log(uri, runtime_storage_inst):
-
- links = _get_mail_archive_links(uri)
- for link in links:
- if _uri_content_changed(link, runtime_storage_inst):
- for mail in _retrieve_mails(link):
- LOG.debug('New mail: %s', mail['message_id'])
- yield mail
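_optimize_body above is the only non-obvious step of the mail pipeline: it drops quoted replies and part delimiters and collapses consecutive duplicate lines. An equivalent standalone version with a sample run:

```python
def optimize_body(email_body):
    result = []
    for line in email_body.split('\n'):
        line = line.strip()
        if line.startswith('>') or line.startswith('--------'):
            continue  # ignore replies and part delimiters
        if not result or result[-1] != line:
            result.append(line)  # skip consecutive duplicates
    return '\n'.join(result)

print(optimize_body('hi\nhi\n> quoted reply\n--------\nbye'))
# -> 'hi\nbye'
```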
diff --git a/stackalytics/processor/mps.py b/stackalytics/processor/mps.py
deleted file mode 100644
index 933d321e4..000000000
--- a/stackalytics/processor/mps.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-import re
-import time
-
-from oslo_log import log as logging
-import requests
-import six
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-NAME_AND_DATE_PATTERN = (r'(?P<member_name>[^<]*)[\s\S]*?'
- r'(?P<date_joined>[^<]*)')
-COMPANY_PATTERN = (r'Date\sJoined[\s\S]*?(?P<company_draft>[^<]*)'
- r'[\s\S]*?From\s(?P<date_from>[\s\S]*?)\(Current\)')
-GARBAGE_PATTERN = r'[/\\~%^\*_]+'
-
-
-def strip_garbage(s):
- return re.sub(r'\s+', ' ', re.sub(GARBAGE_PATTERN, '', s))
-
-
-def _retrieve_member(requests_session, uri, member_id, html_parser):
-
- content = utils.read_uri(uri, session=requests_session)
-
- if not content:
- return {}
-
- member = {}
-
- for rec in re.finditer(NAME_AND_DATE_PATTERN, content):
- result = rec.groupdict()
-
- member['member_id'] = member_id
- member['member_name'] = strip_garbage(result['member_name'])
- member['date_joined'] = result['date_joined']
- member['member_uri'] = uri
- break
-
- member['company_draft'] = '*independent'
- for rec in re.finditer(COMPANY_PATTERN, content):
- result = rec.groupdict()
-
- member['company_draft'] = strip_garbage(
- html_parser.unescape(result['company_draft']))
-
- return member
-
-
-def log(uri, runtime_storage_inst, days_to_update_members, members_look_ahead):
- LOG.debug('Retrieving new openstack.org members')
-
- last_update_members_date = runtime_storage_inst.get_by_key(
- 'last_update_members_date') or 0
- last_member_index = runtime_storage_inst.get_by_key(
- 'last_member_index') or 0
-
- end_update_date = int(time.time()) - days_to_update_members * 24 * 60 * 60
-
- if last_update_members_date <= end_update_date:
- last_member_index = 0
- last_update_members_date = int(time.time())
-
- runtime_storage_inst.set_by_key('last_update_members_date',
- last_update_members_date)
-
- cnt_empty = 0
- cur_index = last_member_index + 1
- html_parser = six.moves.html_parser.HTMLParser()
- requests_session = requests.Session()
-
- while cnt_empty < members_look_ahead:
-
- profile_uri = uri + str(cur_index)
- member = _retrieve_member(requests_session, profile_uri,
- str(cur_index), html_parser)
-
- if 'member_name' not in member:
- cnt_empty += 1
- cur_index += 1
- continue
-
- cnt_empty = 0
- last_member_index = cur_index
- cur_index += 1
- LOG.debug('New member: %s', member['member_id'])
- yield member
-
- time.sleep(random.random() * 5)
-
- requests_session.close()
- LOG.debug('Last_member_index: %s', last_member_index)
- runtime_storage_inst.set_by_key('last_member_index', last_member_index)
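The crawler above terminates on a run of consecutive missing profiles rather than on the first miss, since member ids can have gaps. A minimal model of that look-ahead rule, with fetch standing in for the HTTP profile lookup:

```python
def scan(fetch, start_index, look_ahead):
    # stop only after `look_ahead` consecutive misses
    cnt_empty = 0
    cur_index = start_index
    while cnt_empty < look_ahead:
        member = fetch(cur_index)
        cur_index += 1
        if member is None:
            cnt_empty += 1
            continue
        cnt_empty = 0  # a hit resets the miss counter
        yield member

data = {1: 'alice', 2: 'bob', 5: 'carol'}
print(list(scan(data.get, 1, look_ahead=3)))  # ['alice', 'bob', 'carol']
```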
diff --git a/stackalytics/processor/normalizer.py b/stackalytics/processor/normalizer.py
deleted file mode 100644
index 578082223..000000000
--- a/stackalytics/processor/normalizer.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import six
-
-from stackalytics.processor import user_processor
-from stackalytics.processor import utils
-
-
-def _normalize_user(user):
- for c in user['companies']:
- c['end_date'] = utils.date_to_timestamp(c['end_date'])
-
- # sort companies by end_date
- def end_date_comparator(x, y):
- if x["end_date"] == 0:
- return 1
- elif y["end_date"] == 0:
- return -1
- else:
- return x["end_date"] - y["end_date"]
-
- user['companies'].sort(key=utils.cmp_to_key(end_date_comparator))
- if user['companies']:
- if user['companies'][-1]['end_date'] != 0:
- user['companies'].append(dict(company_name='*independent',
- end_date=0))
- user['user_id'] = user_processor.make_user_id(
- launchpad_id=user.get('launchpad_id'),
- emails=user.get('emails'))
-
-
-def _normalize_users(users):
- for user in users:
- _normalize_user(user)
-
-
-def _normalize_releases(releases):
- for release in releases:
- release['release_name'] = release['release_name'].lower()
- release['end_date'] = utils.date_to_timestamp(release['end_date'])
- releases.sort(key=lambda x: x['end_date'])
-
-
-def _normalize_repos(repos):
- for repo in repos:
- if 'releases' not in repo:
- repo['releases'] = [] # release will be assigned automatically
-
-
-NORMALIZERS = {
- 'users': _normalize_users,
- 'releases': _normalize_releases,
- 'repos': _normalize_repos,
-}
-
-
-def normalize_default_data(default_data):
- for key, normalizer in six.iteritems(NORMALIZERS):
- if key in default_data:
- normalizer(default_data[key])
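The comparator in _normalize_user treats end_date == 0 as "current employer", which must sort after every dated affiliation. A self-contained check of that ordering, assuming utils.cmp_to_key behaves like functools.cmp_to_key:

```python
import functools

def end_date_comparator(x, y):
    # the 0 sentinel means 'current employer' and sorts last
    if x['end_date'] == 0:
        return 1
    if y['end_date'] == 0:
        return -1
    return x['end_date'] - y['end_date']

companies = [{'company_name': 'B', 'end_date': 0},
             {'company_name': 'A', 'end_date': 1380000000}]
companies.sort(key=functools.cmp_to_key(end_date_comparator))
print([c['company_name'] for c in companies])  # ['A', 'B']
```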
diff --git a/stackalytics/processor/openstackid_utils.py b/stackalytics/processor/openstackid_utils.py
deleted file mode 100644
index 216a9f32e..000000000
--- a/stackalytics/processor/openstackid_utils.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import logging
-
-import requests
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-OSID_URI = ('https://openstackid-resources.openstack.org/'
- 'api/public/v1/members?'
- 'filter=email==%s&relations=all_affiliations')
-INTERVAL_GAP_THRESHOLD = 60 * 60 * 24 # ignore gaps shorter than this
-
-_openstackid_session = requests.Session()
-
-
-def _openstack_profile_by_email(email):
- LOG.debug('Lookup user email %s at OpenStackID', email)
- uri = OSID_URI % email
- data = utils.read_json_from_uri(uri, session=_openstackid_session)
-
- if not data:
- return None
-
- if not data.get('data'):
- return None # not found
-
- return data['data'][-1] # return the last (most recent) record
-
-
-Interval = collections.namedtuple('Interval', ['start', 'end', 'value'])
-
-
-def _cut_open_ended_intervals(intervals):
- """Keep only one open interval
-
- If there are multiple open intervals keep only the latest open;
- cut others so they no longer intersect each other.
-
- :param intervals: [Interval]
- :return: processed intervals: [Interval]
- """
- filtered_intervals = []
- cut = 0
- for interval in reversed(intervals):
- if not interval.end:
- new_interval = Interval(interval.start, cut, interval.value)
- filtered_intervals.append(new_interval)
- cut = interval.start
- else:
- filtered_intervals.append(interval)
- return list(reversed(filtered_intervals))
-
-
-def _iterate_intervals(intervals, threshold=INTERVAL_GAP_THRESHOLD):
- """Iterate intervals and fill gaps around of them
-
- :param intervals: list of Interval objects
- :param threshold: do not yield intervals shorted than threshold
- """
- if not intervals:
- yield Interval(0, 0, None)
- else:
- intervals.sort(key=lambda x: x.start)
- intervals = _cut_open_ended_intervals(intervals)
-
- prev_start = 0
-
- for interval in intervals:
- if interval.start and interval.start - prev_start > threshold:
- yield Interval(prev_start, interval.start, None) # prior
-
- yield interval
-
- prev_start = interval.end
-
- last_end = intervals[-1].end
- if last_end:
- yield Interval(last_end, 0, None)
-
-
-def user_profile_by_email(email):
- data = _openstack_profile_by_email(email)
-
- if not data: # user is not found
- return None
-
- intervals = [Interval(a.get('start_date'), a.get('end_date') or 0,
- a.get('organization', {}).get('name'))
- for a in data.get('affiliations', [])]
- companies = [dict(company_name=interval.value or '*independent',
- end_date=interval.end)
- for interval in _iterate_intervals(intervals)]
- user = {
- 'openstack_id': data['id'],
- 'user_name': ' '.join(filter(None, [data.get('first_name'), data.get('last_name')])),
- 'emails': [email],
- 'companies': companies,
- }
- return user
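_iterate_intervals above fills the timeline around known affiliations so that gaps become *independent periods. A simplified sketch of the gap-filling step only; it omits the open-ended-interval cutting done by _cut_open_ended_intervals, and the timestamps are arbitrary:

```python
import collections

Interval = collections.namedtuple('Interval', ['start', 'end', 'value'])

def fill_gaps(intervals, threshold):
    """Yield intervals with gaps longer than threshold filled by None."""
    intervals = sorted(intervals, key=lambda x: x.start)
    prev_end = 0
    for interval in intervals:
        if interval.start - prev_end > threshold:
            yield Interval(prev_end, interval.start, None)
        yield interval
        prev_end = interval.end
    if prev_end:
        yield Interval(prev_end, 0, None)  # open-ended unaffiliated tail

for i in fill_gaps([Interval(100, 200, 'Acme'),
                    Interval(500, 600, 'Initech')], threshold=50):
    print(i)
```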
diff --git a/stackalytics/processor/rcs.py b/stackalytics/processor/rcs.py
deleted file mode 100644
index 88e37791c..000000000
--- a/stackalytics/processor/rcs.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import re
-
-from oslo_log import log as logging
-import paramiko
-import time
-
-LOG = logging.getLogger(__name__)
-
-DEFAULT_PORT = 29418
-GERRIT_URI_PREFIX = r'^gerrit:\/\/'
-PAGE_LIMIT = 100
-REQUEST_COUNT_LIMIT = 20
-SSH_ERRORS_LIMIT = 10
-
-
-class RcsException(Exception):
- pass
-
-
-class Rcs(object):
- """Base object for Review Control System"""
-
- def __init__(self):
- pass
-
- def setup(self, **kwargs):
- return True
-
- def get_project_list(self):
- pass
-
- def log(self, repo, branch, last_retrieval_time, status=None,
- grab_comments=False):
- return []
-
- def close(self):
- pass
-
-
-class Gerrit(Rcs):
- def __init__(self, uri):
- super(Gerrit, self).__init__()
-
- stripped = re.sub(GERRIT_URI_PREFIX, '', uri)
- if stripped:
- self.hostname, _, self.port = stripped.partition(':')
- if not self.port:
- self.port = DEFAULT_PORT
- else:
- raise RcsException('Invalid rcs uri %s' % uri)
-
- self.key_filename = None
- self.username = None
- self.ssh_errors_limit = SSH_ERRORS_LIMIT
-
- self.client = paramiko.SSHClient()
- self.client.load_system_host_keys()
- self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
- self.request_count = 0
- self.error_count = 0
-
- def __del__(self):
- self.close()
-
- def setup(self, **kwargs):
- self.key_filename = kwargs.get('key_filename')
- self.username = kwargs.get('username')
- self.ssh_errors_limit = kwargs.get('gerrit_retry') or SSH_ERRORS_LIMIT
-
- self._connect()
-
- def _connect(self):
- try:
- self.client.connect(self.hostname, port=self.port,
- key_filename=self.key_filename,
- username=self.username)
- LOG.debug('Successfully connected to Gerrit')
- except Exception as e:
- LOG.error('Failed to connect to gerrit %(host)s:%(port)s. '
- 'Error: %(err)s',
- {'host': self.hostname, 'port': self.port, 'err': e},
- exc_info=True)
- raise RcsException('Failed to connect to gerrit: %s' % e)
-
- def _get_cmd(self, project_organization, module, branch, age=0,
- status=None, limit=PAGE_LIMIT, grab_comments=False):
- cmd = ('gerrit query --all-approvals --patch-sets --format JSON '
- 'project:\'%(ogn)s/%(module)s\' branch:%(branch)s '
- 'limit:%(limit)s age:%(age)ss' %
- {'ogn': project_organization, 'module': module,
- 'branch': branch, 'limit': limit, 'age': age})
- if status:
- cmd += ' status:%s' % status
- if grab_comments:
- cmd += ' --comments'
- return cmd
-
- def _exec_command(self, cmd):
- # check how many requests were sent over connection and reconnect
- if self.request_count >= REQUEST_COUNT_LIMIT:
- self.close()
- self.request_count = 0
- self._connect()
- else:
- self.request_count += 1
-
- try:
- return self.client.exec_command(cmd)
- except Exception as e:
- LOG.error('Error %(error)s while execute command %(cmd)s',
- {'error': e, 'cmd': cmd}, exc_info=True)
- self.request_count = REQUEST_COUNT_LIMIT
- raise RcsException(e)
-
- def _exec_command_with_retrial(self, cmd):
- while self.error_count < self.ssh_errors_limit:
- try:
- return self._exec_command(cmd)
- except RcsException:
- self.error_count += 1
-
- raise RcsException('Too many SSH errors, aborting. Consider '
- 'increasing "gerrit_retry" value')
-
- def _poll_reviews(self, project_organization, module, branch,
- last_retrieval_time, status=None, grab_comments=False):
- age = 0
- proceed = True
-
- # the algorithm retrieves reviews by age; the next page starts at
- # the age of the oldest review seen so far; the oldest review may
- # appear again in subsequent results (the age offset is computed
- # against the local machine's timestamp but evaluated remotely),
- # so we need to track all ids and ignore those we've already seen
- processed = set()
-
- while proceed:
- cmd = self._get_cmd(project_organization, module, branch,
- age=age, status=status,
- grab_comments=grab_comments)
- LOG.debug('Executing command: %s', cmd)
- exec_result = self._exec_command_with_retrial(cmd)
- if not exec_result:
- break
- stdin, stdout, stderr = exec_result
-
- proceed = False # assume there are no more reviews available
- for line in stdout:
- review = json.loads(line)
-
- if 'number' not in review:
- continue # Skip summary reviews
-
- if review['number'] in processed:
- continue # already seen that
-
- last_updated = int(review['lastUpdated'])
- if last_updated < last_retrieval_time: # too old
- proceed = False
- break
-
- proceed = True # have at least one review, can dig deeper
- age = max(age, int(time.time()) - last_updated)
- processed.add(review['number'])
- review['module'] = module
- yield review
-
- def get_project_list(self):
- exec_result = self._exec_command_with_retrial('gerrit ls-projects')
- if not exec_result:
- raise RcsException("Gerrit returned no projects")
- stdin, stdout, stderr = exec_result
- result = [line.strip() for line in stdout]
-
- return result
-
- def log(self, repo, branch, last_retrieval_time, status=None,
- grab_comments=False):
- # poll reviews from newest to oldest between last_retrieval_time and now
- LOG.debug('Poll reviews for module: %s', repo['module'])
- for review in self._poll_reviews(
- repo['organization'], repo['module'], branch,
- last_retrieval_time, status=status,
- grab_comments=grab_comments):
- yield review
-
- def close(self):
- self.client.close()
-
-
-def get_rcs(uri):
- LOG.debug('Review control system is requested for uri %s', uri)
- match = re.search(GERRIT_URI_PREFIX, uri)
- if match:
- return Gerrit(uri)
- else:
- LOG.warning('Unsupported review control system, fallback to dummy')
- return Rcs()
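The core of the Gerrit poller is the age-based paging in _poll_reviews: each batch is requested with an age offset equal to the oldest review seen, and a processed set absorbs the overlap at page boundaries. A distilled version of just that loop, where query(age) is an assumed callable returning reviews newest-first, as gerrit query does:

```python
import time

def poll(query, last_retrieval_time):
    age = 0
    proceed = True
    processed = set()
    while proceed:
        proceed = False              # assume this page is the last one
        for review in query(age):
            if review['number'] in processed:
                continue             # overlap from a page boundary
            if review['lastUpdated'] < last_retrieval_time:
                break                # older than the last poll: stop
            proceed = True           # fresh review found: dig deeper
            age = max(age, int(time.time()) - review['lastUpdated'])
            processed.add(review['number'])
            yield review
```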
diff --git a/stackalytics/processor/record_processor.py b/stackalytics/processor/record_processor.py
deleted file mode 100644
index e93d5064a..000000000
--- a/stackalytics/processor/record_processor.py
+++ /dev/null
@@ -1,783 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import bisect
-import collections
-import copy
-import functools
-import time
-
-from oslo_config import cfg
-from oslo_log import log as logging
-import six
-
-from stackalytics.processor import launchpad_utils
-from stackalytics.processor import user_processor
-from stackalytics.processor import utils
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-class RecordProcessor(object):
- def __init__(self, runtime_storage_inst):
- self.runtime_storage_inst = runtime_storage_inst
-
- self.domains_index = runtime_storage_inst.get_by_key('companies')
-
- self.releases = runtime_storage_inst.get_by_key('releases')
- self.releases_dates = [r['end_date'] for r in self.releases]
-
- self.modules = None
- self.alias_module_map = None
-
- def _get_release(self, timestamp):
- release_index = bisect.bisect(self.releases_dates, timestamp)
- if release_index >= len(self.releases):
- LOG.warning('Timestamp %s is beyond releases boundaries, the last '
- 'release will be used. Please consider adding a '
- 'new release into default_data.json', timestamp)
- release_index = len(self.releases) - 1
- return self.releases[release_index]['release_name']
-
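_get_release above relies on releases being kept sorted by end_date, so a bisect finds the first release whose window contains the timestamp. A compact standalone equivalent; release names and timestamps are examples only:

```python
import bisect

releases = [{'release_name': 'icehouse', 'end_date': 1398211200},
            {'release_name': 'juno', 'end_date': 1413849600}]
release_dates = [r['end_date'] for r in releases]

def get_release(timestamp):
    index = bisect.bisect(release_dates, timestamp)
    if index >= len(releases):
        index = len(releases) - 1  # clamp to the last known release
    return releases[index]['release_name']

print(get_release(1400000000))  # 'juno'
```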
- def _get_modules(self):
- if self.modules is None:
- self.modules = set()
- self.alias_module_map = dict()
-
- for repo in utils.load_repos(self.runtime_storage_inst):
- module = repo['module'].lower()
- module_aliases = repo.get('aliases') or []
-
- add = True
- for module_name in ([module] + module_aliases):
- for m in self.modules:
- if module_name.find(m) >= 0:
- add = False
- break
- if m.find(module_name) >= 0:
- self.modules.remove(m)
- break
- if add:
- self.modules.add(module_name)
-
- for alias in module_aliases:
- self.alias_module_map[alias] = module
-
- return self.modules, self.alias_module_map
-
- def _need_to_fetch_launchpad(self):
- return CONF.fetching_user_source == 'launchpad'
-
- def _update_user(self, record):
- email = record.get('author_email')
- user_e = user_processor.load_user(
- self.runtime_storage_inst, email=email) or {}
-
- user_name = record.get('author_name')
- launchpad_id = record.get('launchpad_id')
- if (self._need_to_fetch_launchpad() and email and (not user_e) and
- (not launchpad_id) and (not user_e.get('launchpad_id'))):
- # query LP
- launchpad_id, lp_user_name = launchpad_utils.query_lp_info(email)
- if lp_user_name:
- user_name = lp_user_name
-
- gerrit_id = record.get('gerrit_id')
- if gerrit_id:
- user_g = user_processor.load_user(
- self.runtime_storage_inst, gerrit_id=gerrit_id) or {}
- if (self._need_to_fetch_launchpad() and (not user_g) and
- (not launchpad_id) and (not user_e.get('launchpad_id'))):
- # query LP
- guessed_lp_id = gerrit_id
- lp_user_name = launchpad_utils.query_lp_user_name(
- guessed_lp_id)
- if lp_user_name == user_name:
- launchpad_id = guessed_lp_id
- else:
- user_g = {}
-
- zanata_id = record.get('zanata_id')
- if zanata_id:
- user_z = user_processor.load_user(
- self.runtime_storage_inst, zanata_id=zanata_id) or {}
- if (self._need_to_fetch_launchpad() and (not user_z) and
- (not launchpad_id) and (not user_e.get('launchpad_id'))):
- # query LP
- guessed_lp_id = zanata_id
- user_name = launchpad_utils.query_lp_user_name(guessed_lp_id)
- if user_name != guessed_lp_id:
- launchpad_id = guessed_lp_id
- else:
- user_z = {}
-
- user_l = user_processor.load_user(
- self.runtime_storage_inst, launchpad_id=launchpad_id) or {}
-
- if user_processor.are_users_same([user_e, user_l, user_g, user_z]):
- # If sequence numbers are set and the same, merge is not needed
- return user_e
-
- user = user_processor.create_user(
- self.domains_index, launchpad_id, email, gerrit_id, zanata_id,
- user_name)
-
- if user_e or user_l or user_g or user_z:
- # merge between existing profiles and a new one
- user, users_to_delete = user_processor.merge_user_profiles(
- self.domains_index, [user_e, user_l, user_g, user_z, user])
-
- # delete all unneeded profiles
- user_processor.delete_users(
- self.runtime_storage_inst, users_to_delete)
- else:
- # create new profile
- if (self._need_to_fetch_launchpad() and not user_name):
- user_name = launchpad_utils.query_lp_user_name(launchpad_id)
- if user_name:
- user['user_name'] = user_name
- LOG.debug('Created new user: %s', user)
-
- user_processor.store_user(self.runtime_storage_inst, user)
- LOG.debug('Stored user: %s', user)
-
- return user
-
- def _update_record_and_user(self, record):
- user = self._update_user(record)
-
- record['user_id'] = user['user_id']
- if user.get('user_name'):
- record['author_name'] = user['user_name']
-
- company, policy = user_processor.get_company_for_date(
- user['companies'], record['date'])
-
- if not user.get('static'):
- # for auto-generated profiles affiliation may be overridden
- if company != '*robots' and policy == 'open':
- company = (user_processor.get_company_by_email(
- self.domains_index, record.get('author_email')) or company)
-
- record['company_name'] = company
-
- def _process_commit(self, record):
- record['primary_key'] = record['commit_id']
- record['loc'] = record['lines_added'] + record['lines_deleted']
- record['author_email'] = record['author_email'].lower()
- record['commit_date'] = record['date']
-
- coauthors = record.get('coauthor')
- if not coauthors:
- self._update_record_and_user(record)
-
- if record['company_name'] != '*robots':
- yield record
- else:
- if record['author_email'] not in [
- c['author_email'] for c in coauthors]:
- coauthors.append({'author_name': record['author_name'],
- 'author_email': record['author_email']})
- for coauthor in coauthors:
- coauthor['date'] = record['date']
- self._update_record_and_user(coauthor)
-
- for coauthor in coauthors:
- new_record = copy.deepcopy(record)
- new_record.update(coauthor)
- new_record['primary_key'] += coauthor['author_email']
-
- yield new_record
-
- def _make_review_record(self, record):
- # copy everything except patchsets and flatten user data
- review = dict([(k, v) for k, v in six.iteritems(record)
- if k not in ['patchSets', 'owner', 'createdOn',
- 'comments']])
- owner = record['owner']
-
- review['primary_key'] = review['id']
- if owner.get('username'):
- review['gerrit_id'] = owner['username']
- review['author_name'] = (owner.get('name') or owner.get('username')
- or 'Anonymous Coward') # do it like gerrit
- if owner.get('email'):
- review['author_email'] = owner['email'].lower()
- review['date'] = record['createdOn']
-
- patch_sets = record.get('patchSets', [])
- review['updated_on'] = review['date']
- if patch_sets:
- patch = patch_sets[-1]
- if 'approvals' in patch:
- review['value'] = min([int(p['value'])
- for p in patch['approvals']])
- review['updated_on'] = patch['approvals'][0]['grantedOn']
- else:
- review['updated_on'] = patch['createdOn']
-
- if 'value' not in review:
- review['value'] = 0
-
- self._update_record_and_user(review)
- return review
-
- def _make_patch_record(self, review, patch):
- patch_record = dict()
- patch_record['record_type'] = 'patch'
- patch_record['primary_key'] = utils.get_patch_id(
- review['id'], patch['number'])
- patch_record['number'] = patch['number']
- patch_record['date'] = patch['createdOn']
- uploader = patch['uploader']
- if uploader.get('username'):
- patch_record['gerrit_id'] = uploader['username']
- patch_record['author_name'] = (uploader.get('name')
- or uploader.get('username')
- or 'Anonymous Coward')
- if uploader.get('email'):
- patch_record['author_email'] = uploader['email'].lower()
- patch_record['module'] = review['module']
- patch_record['branch'] = review['branch']
- patch_record['review_id'] = review['id']
-
- self._update_record_and_user(patch_record)
- return patch_record
-
- def _make_mark_record(self, review, patch, approval):
- # copy everything and flatten user data
- mark = dict([(k, v) for k, v in six.iteritems(approval)
- if k not in ['by', 'grantedOn', 'value', 'description']])
- reviewer = approval['by']
-
- mark['record_type'] = 'mark'
- mark['value'] = int(approval['value'])
- mark['date'] = approval['grantedOn']
- mark['primary_key'] = (review['id'] + str(mark['date']) + mark['type'])
- mark['gerrit_id'] = reviewer['username']
- mark['author_name'] = reviewer.get('name') or reviewer.get('username')
- mark['author_email'] = reviewer['email'].lower()
- mark['module'] = review['module']
- mark['branch'] = review['branch']
- mark['review_id'] = review['id']
- mark['patch'] = int(patch['number'])
-
- if reviewer['username'] == patch['uploader'].get('username'):
- # reviewer is the same as author of the patch
- mark['type'] = 'Self-%s' % mark['type']
-
- self._update_record_and_user(mark)
- return mark
-
- def _process_review(self, record):
- """Process a review.
-
- Review spawns into records of three types:
- * review - records that a user created review request
- * patch - records that a user submitted another patch set
- * mark - records that a user set approval mark to given review
- """
- owner = record['owner']
- if 'email' in owner or 'username' in owner:
- yield self._make_review_record(record)
-
- for patch in record.get('patchSets', []):
- if (('email' in patch['uploader']) or
- ('username' in patch['uploader'])):
- yield self._make_patch_record(record, patch)
-
- if 'approvals' not in patch:
- continue # not reviewed by anyone
-
- for approval in patch['approvals']:
- if approval['type'] not in ('Code-Review', 'Workflow'):
- continue # keep only Code-Review and Workflow
- if ('email' not in approval['by'] or
- 'username' not in approval['by']):
- continue # ignore
-
- yield self._make_mark_record(record, patch, approval)
-
- # check for abandon action
- if record.get('status') == 'ABANDONED':
- for comment in reversed(record.get('comments') or []):
- if comment['message'] == 'Abandoned':
- action = dict(type='Abandon', value=0)
- action['by'] = comment['reviewer']
- action['grantedOn'] = comment['timestamp']
-
- if ('email' not in action['by'] or
- 'username' not in action['by']):
- continue # ignore
-
- yield self._make_mark_record(
- record, record['patchSets'][-1], action)
-
- def _guess_module(self, record):
- subject = record['subject'].lower()
- pos = len(subject)
- best_guess_module = None
-
- modules, alias_module_map = self._get_modules()
- for module in modules:
- find = subject.find(module)
- if (find >= 0) and (find < pos):
- pos = find
- best_guess_module = module
-
- if best_guess_module:
- if (((pos > 0) and (subject[pos - 1] == '[')) or
- (not record.get('module'))):
- record['module'] = best_guess_module
-
- if not record.get('module'):
- record['module'] = 'unknown'
- elif record['module'] in alias_module_map:
- record['module'] = alias_module_map[record['module']]
-
- def _process_email(self, record):
- record['primary_key'] = record['message_id']
- record['author_email'] = record['author_email'].lower()
-
- self._update_record_and_user(record)
- self._guess_module(record)
-
- if not record.get('blueprint_id'):
- del record['body']
- elif len(record['body']) > 4000:
- record['body'] = record['body'][:4000] + '...'
-
- yield record
-
- def _process_blueprint(self, record):
- bpd_author = record.get('drafter') or record.get('owner')
-
- bpd = dict([(k, v) for k, v in six.iteritems(record)
- if k.find('_link') < 0])
- bpd['record_type'] = 'bpd'
- bpd['primary_key'] = 'bpd:' + record['id']
- bpd['launchpad_id'] = bpd_author
- bpd['date'] = record['date_created']
- bpd['web_link'] = record.get('web_link')
-
- self._update_record_and_user(bpd)
-
- yield bpd
-
- if (record.get('assignee') and record['date_completed'] and
- record.get('implementation_status') == 'Implemented'):
- bpc = dict([(k, v) for k, v in six.iteritems(record)
- if k.find('_link') < 0])
- bpc['record_type'] = 'bpc'
- bpc['primary_key'] = 'bpc:' + record['id']
- bpc['launchpad_id'] = record['assignee']
- bpc['date'] = record['date_completed']
-
- self._update_record_and_user(bpc)
-
- yield bpc
-
- def _process_bug(self, record):
-
- bug_created = record.copy()
- bug_created['primary_key'] = 'bugf:' + record['id']
- bug_created['record_type'] = 'bugf'
- bug_created['launchpad_id'] = record.get('owner')
- bug_created['date'] = record['date_created']
-
- self._update_record_and_user(bug_created)
-
- yield bug_created
-
- FIXED_BUGS = ['Fix Committed', 'Fix Released']
- if (('date_fix_committed' in record or 'date_fix_released' in record)
- and record['status'] in FIXED_BUGS):
- bug_fixed = record.copy()
- bug_fixed['primary_key'] = 'bugr:' + record['id']
- bug_fixed['record_type'] = 'bugr'
- bug_fixed['launchpad_id'] = record.get('assignee') or '*unassigned'
- # It appears that launchpad automatically sets the
- # date_fix_committed field when a bug moves from an open
- # state to Fix Released, however it isn't clear that this
- # is documented. So, we take the commit date if it is
- # present or the release date if no commit date is
- # present.
- bug_fixed['date'] = (
- record.get('date_fix_committed') or
- record['date_fix_released']
- )
-
- self._update_record_and_user(bug_fixed)
-
- yield bug_fixed
-
- def _process_member(self, record):
- user_id = user_processor.make_user_id(member_id=record['member_id'])
- record['primary_key'] = user_id
- record['date'] = utils.member_date_to_timestamp(record['date_joined'])
- record['author_name'] = record['member_name']
- record['module'] = 'unknown'
- company_draft = record['company_draft']
-
- company_name = self.domains_index.get(utils.normalize_company_name(
- company_draft)) or (utils.normalize_company_draft(company_draft))
-
- # author_email is a key to create new user
- record['author_email'] = user_id
- record['company_name'] = company_name
- # _update_record_and_user function will create new user if needed
- self._update_record_and_user(record)
- record['company_name'] = company_name
- user = user_processor.load_user(self.runtime_storage_inst,
- user_id=user_id)
-
- user['user_name'] = record['author_name']
- user['companies'] = [{
- 'company_name': company_name,
- 'end_date': 0,
- }]
- user['company_name'] = company_name
-
- user_processor.store_user(self.runtime_storage_inst, user)
-
- record['company_name'] = company_name
-
- yield record
-
- def _process_translation(self, record):
- # todo split translation and approval
- translation = record.copy()
- user_id = user_processor.make_user_id(zanata_id=record['zanata_id'])
-
- translation['record_type'] = 'tr'
- translation['primary_key'] = '%s:%s:%s:%s' % (
- user_id, record['module'], record['date'], record['branch'])
- translation['author_name'] = user_id
-
- # the following fields are mapped onto standard fields stored in dashboard memory
- translation['loc'] = record['translated']
- translation['value'] = record['language']
-
- self._update_record_and_user(translation)
-
- yield translation
-
- def _renew_record_date(self, record):
- record['week'] = utils.timestamp_to_week(record['date'])
- if ('release' not in record) or (not record['release']):
- record['release'] = self._get_release(record['date'])
-
- def process(self, record_iterator):
- PROCESSORS = {
- 'commit': self._process_commit,
- 'review': self._process_review,
- 'email': self._process_email,
- 'bp': self._process_blueprint,
- 'bug': self._process_bug,
- 'member': self._process_member,
- 'i18n': self._process_translation,
- }
-
- for record in record_iterator:
- for r in PROCESSORS[record['record_type']](record):
- self._renew_record_date(r)
- yield r
-
- def _update_records_with_releases(self, release_index):
- LOG.info('Update records with releases')
-
- def record_handler(record):
- if (record['record_type'] == 'commit'
- and record['primary_key'] in release_index):
- release = release_index[record['primary_key']]
- else:
- release = self._get_release(record['date'])
-
- if record['release'] != release:
- record['release'] = release
- yield record
-
- yield record_handler
-
- def _update_records_with_user_info(self):
- LOG.info('Update user info in records')
-
- def record_handler(record):
- company_name = record['company_name']
- user_id = record['user_id']
- author_name = record['author_name']
-
- self._update_record_and_user(record)
-
- if ((record['company_name'] != company_name) or
- (record['user_id'] != user_id) or
- (record['author_name'] != author_name)):
- LOG.debug('User info (%(id)s, %(name)s, %(company)s) has '
- 'changed in record %(record)s',
- {'id': user_id, 'name': author_name,
- 'company': company_name, 'record': record})
- yield record
-
- yield record_handler
-
- def _update_commits_with_merge_date(self):
- LOG.info('Update commits with merge date')
-
- change_id_to_date = {}
-
- def record_handler_pass_1(record):
- if (record['record_type'] == 'review' and
- record.get('status') == 'MERGED'):
- change_id_to_date[record['id']] = record['lastUpdated']
-
- yield record_handler_pass_1
-
- LOG.info('Update commits with merge date: pass 2')
-
- def record_handler_pass_2(record):
- if record['record_type'] == 'commit':
- change_id_list = record.get('change_id')
- if change_id_list and len(change_id_list) == 1:
- change_id = change_id_list[0]
- if change_id in change_id_to_date:
- old_date = record['date']
- if old_date != change_id_to_date[change_id]:
- record['date'] = change_id_to_date[change_id]
- self._renew_record_date(record)
- LOG.debug('Date %(date)s has changed in record '
- '%(record)s', {'date': old_date,
- 'record': record})
- yield record
-
- yield record_handler_pass_2
-
- def _update_blueprints_with_mention_info(self):
- LOG.info('Process blueprints and calculate mention info')
-
- valid_blueprints = {}
- mentioned_blueprints = {}
-
- def record_handler_pass_1(record):
- for bp in record.get('blueprint_id', []):
- if bp in mentioned_blueprints:
- mentioned_blueprints[bp]['count'] += 1
- if record['date'] > mentioned_blueprints[bp]['date']:
- mentioned_blueprints[bp]['date'] = record['date']
- else:
- mentioned_blueprints[bp] = {
- 'count': 1,
- 'date': record['date']
- }
- if record['record_type'] in ['bpd', 'bpc']:
- valid_blueprints[record['id']] = {
- 'primary_key': record['primary_key'],
- 'count': 0,
- 'date': record['date']
- }
-
- yield record_handler_pass_1
-
- for bp_name, bp in six.iteritems(valid_blueprints):
- if bp_name in mentioned_blueprints:
- bp['count'] = mentioned_blueprints[bp_name]['count']
- bp['date'] = mentioned_blueprints[bp_name]['date']
- else:
- bp['count'] = 0
- bp['date'] = 0
-
- LOG.info('Process blueprints and calculate mention info: pass 2')
-
- def record_handler_pass_2(record):
- need_update = False
-
- valid_bp = set([])
- for bp in record.get('blueprint_id', []):
- if bp in valid_blueprints:
- valid_bp.add(bp)
- else:
- LOG.debug('Update record %s: removed invalid bp: %s',
- record['primary_key'], bp)
- need_update = True
- record['blueprint_id'] = list(valid_bp)
-
- if record['record_type'] in ['bpd', 'bpc']:
- bp = valid_blueprints[record['id']]
- if ((record.get('mention_count') != bp['count']) or
- (record.get('mention_date') != bp['date'])):
- record['mention_count'] = bp['count']
- record['mention_date'] = bp['date']
- LOG.debug('Update record %s: mention stats: (%s:%s)',
- record['primary_key'], bp['count'], bp['date'])
- need_update = True
-
- if need_update:
- yield record
-
- yield record_handler_pass_2
-
- def _determine_core_contributors(self):
- LOG.info('Determine core contributors')
-
- module_branches = collections.defaultdict(set)
- quarter_ago = int(time.time()) - 60 * 60 * 24 * 30 * 3 # a quarter ago
-
- def record_handler(record):
- if (record['record_type'] == 'mark' and
- record['date'] > quarter_ago and
- record['value'] in [2, -2]):
- module_branch = (record['module'], record['branch'])
- user_id = record['user_id']
- module_branches[user_id].add(module_branch)
-
- yield record_handler
-
- for user in self.runtime_storage_inst.get_all_users():
- core_old = user.get('core')
- user_module_branch = module_branches.get(user['user_id'])
- if user_module_branch:
- user['core'] = list(user_module_branch)
- elif user.get('core'):
- del user['core']
-
- if user.get('core') != core_old:
- user_processor.store_user(self.runtime_storage_inst, user)
-
- def _close_patch(self, cores, marks):
- if len(marks) < 2:
- return
-
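-        # marks are walked from newest to oldest: the most recent +2/-2 from
-        # a core reviewer becomes the reference value, and any older mark of
-        # the opposite sign is flagged as a disagreement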
- core_mark = 0
- for mark in sorted(marks, key=lambda x: x['date'], reverse=True):
-
- if core_mark == 0:
- if (mark['module'], mark['branch'], mark['user_id']) in cores:
- # mark is from core engineer
- core_mark = mark['value']
- continue
-
- disagreement = ((core_mark != 0) and
- ((core_mark < 0 < mark['value']) or
- (core_mark > 0 > mark['value'])))
- old_disagreement = mark.get('disagreement', False)
- mark['disagreement'] = disagreement
- if old_disagreement != disagreement:
- yield mark
-
- def _update_marks_with_disagreement(self):
- LOG.info('Process marks to find disagreements')
-
- cores = set()
- for user in self.runtime_storage_inst.get_all_users():
- for (module, branch) in (user.get('core') or []):
- cores.add((module, branch, user['user_id']))
-
- # map from review_id to current patch and list of marks
- marks_per_patch = collections.defaultdict(
- lambda: {'patch_number': 0, 'marks': []})
-
- def record_handler(record):
- if (record['record_type'] == 'mark' and
- record['type'] == 'Code-Review'):
- review_id = record['review_id']
- patch_number = record['patch']
-
- if review_id in marks_per_patch:
- # review is already seen, check if patch is newer
- if (marks_per_patch[review_id]['patch_number'] <
- patch_number):
- # the patch is new, close the current
- for processed in self._close_patch(
- cores, marks_per_patch[review_id]['marks']):
- yield processed
- del marks_per_patch[review_id]
-
- marks_per_patch[review_id]['patch_number'] = patch_number
- marks_per_patch[review_id]['marks'].append(record)
-
- yield record_handler
-
- # purge the rest
- for marks_patch in marks_per_patch.values():
- self.runtime_storage_inst.set_records(
- self._close_patch(cores, marks_patch['marks']))
-
- def _update_members_company_name(self):
- LOG.info('Update members with company names')
-
- def record_handler(record):
- if record['record_type'] != 'member':
- return
-
- company_draft = record['company_draft']
- company_name = self.domains_index.get(
- utils.normalize_company_name(company_draft)) or (
- utils.normalize_company_draft(company_draft))
-
- if company_name == record['company_name']:
- return
-
- LOG.debug('Update record %s, company name changed to %s',
- record, company_name)
- record['company_name'] = company_name
-
- yield record
-
- user = user_processor.load_user(self.runtime_storage_inst,
- user_id=record['user_id'])
- LOG.debug('Update user %s, company name changed to %s',
- user, company_name)
- user['companies'] = [{
- 'company_name': company_name,
- 'end_date': 0,
- }]
- user_processor.store_user(self.runtime_storage_inst, user)
-
- yield record_handler
-
- def _update_commits_with_module_alias(self):
- LOG.info('Update record with aliases')
-
- modules, alias_module_map = self._get_modules()
-
- def record_handler(record):
- if record['record_type'] != 'commit':
- return
-
- rec_module = record.get('module', None)
- if rec_module and rec_module in alias_module_map:
- record['module'] = alias_module_map[rec_module]
- yield record
-
- yield record_handler
-
- def post_processing(self, release_index):
- processors = [
- self._update_records_with_user_info,
- self._update_commits_with_merge_date,
- functools.partial(self._update_records_with_releases,
- release_index),
- self._update_commits_with_module_alias,
- self._update_blueprints_with_mention_info,
- self._determine_core_contributors,
- self._update_members_company_name,
- self._update_marks_with_disagreement,
- ]
-
- pipeline_processor = utils.make_pipeline_processor(processors)
-
- self.runtime_storage_inst.set_records(pipeline_processor(
- self.runtime_storage_inst.get_all_records))
diff --git a/stackalytics/processor/runtime_storage.py b/stackalytics/processor/runtime_storage.py
deleted file mode 100644
index 7ea9ffb53..000000000
--- a/stackalytics/processor/runtime_storage.py
+++ /dev/null
@@ -1,247 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-import memcache
-from oslo_log import log as logging
-import six
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-BULK_READ_SIZE = 64
-BULK_DELETE_SIZE = 4096
-RECORD_ID_PREFIX = 'record:'
-UPDATE_ID_PREFIX = 'update:'
-MEMCACHED_URI_PREFIX = r'^memcached:\/\/'
-
-
-class RuntimeStorage(object):
- def __init__(self, uri):
- pass
-
- def set_records(self, records_iterator):
- pass
-
- def apply_corrections(self, corrections_iterator):
- pass
-
- def get_by_key(self, key):
- pass
-
- def set_by_key(self, key, value):
- pass
-
- def get_update(self, pid):
- pass
-
- def active_pids(self, pids):
- pass
-
- def get_all_records(self):
- pass
-
-
-class MemcachedStorage(RuntimeStorage):
- def __init__(self, uri):
- super(MemcachedStorage, self).__init__(uri)
-
- stripped = re.sub(MEMCACHED_URI_PREFIX, '', uri)
- if stripped:
- storage_uri = stripped.split(',')
- self.memcached = memcache.Client(storage_uri)
- self._init_user_count()
- self.record_index = {}
- else:
- raise Exception('Invalid storage uri %s' % uri)
-
- def _build_index_lazily(self):
- if self.record_index:
- return
- for record in self.get_all_records():
- self.record_index[record['primary_key']] = record['record_id']
-
- def set_records(self, records_iterator, merge_handler=None):
- self._build_index_lazily()
- for record in records_iterator:
- if record['primary_key'] in self.record_index:
- # update
- record_id = self.record_index[record['primary_key']]
- if not merge_handler:
- record['record_id'] = record_id
- LOG.debug('Update record %s', record)
- self.set_by_key(self._get_record_name(record_id), record)
- else:
- original = self.get_by_key(self._get_record_name(
- record_id))
- if merge_handler(original, record):
- LOG.debug('Update record with merge %s', record)
- self.set_by_key(self._get_record_name(record_id),
- original)
- else:
- # insert record
- record_id = self._get_record_count()
- record['record_id'] = record_id
- self.record_index[record['primary_key']] = record_id
- LOG.debug('Insert new record %s', record)
- self.set_by_key(self._get_record_name(record_id), record)
- self._set_record_count(record_id + 1)
-
- self._commit_update(record_id)
-
- def apply_corrections(self, corrections_iterator):
- self._build_index_lazily()
- for correction in corrections_iterator:
- if correction['primary_key'] not in self.record_index:
- continue
-
- record_id = self.record_index[correction['primary_key']]
- original = self.get_by_key(self._get_record_name(record_id))
- need_update = False
-
- for field, value in six.iteritems(correction):
- if (field not in original) or (original[field] != value):
- need_update = True
- original[field] = value
-
- if need_update:
- self.set_by_key(self._get_record_name(record_id), original)
- self._commit_update(record_id)
-
- def inc_user_count(self):
- return self.memcached.incr('user:count')
-
- def get_all_users(self):
- for n in six.moves.range(0, self.get_by_key('user:count') + 1):
- user = self.get_by_key('user:%s' % n)
- if user:
- yield user
-
- def get_by_key(self, key):
- if six.PY2:
- key = key.encode('utf8')
- return self.memcached.get(key)
-
- def set_by_key(self, key, value):
- if six.PY2:
- key = key.encode('utf8')
- if not self.memcached.set(key, value):
- LOG.critical('Failed to store data in memcached: '
- 'key %(key)s, value %(value)s',
- {'key': key, 'value': value})
- raise Exception('Memcached set failed')
-
- def delete_by_key(self, key):
- if six.PY2:
- key = key.encode('utf8')
- if not self.memcached.delete(key):
- LOG.critical('Failed to delete data from memcached: key %s', key)
- raise Exception('Memcached delete failed')
-
- def get_update(self, pid):
- last_update = self.get_by_key('pid:%s' % pid)
- update_count = self._get_update_count()
-
- self.set_by_key('pid:%s' % pid, update_count)
- self._set_pids(pid)
-
- if not last_update:
- for i in self.get_all_records():
- yield i
- else:
- for update_id_set in utils.make_range(last_update, update_count,
- BULK_READ_SIZE):
- update_set = self.memcached.get_multi(
- update_id_set, UPDATE_ID_PREFIX).values()
- for i in self.memcached.get_multi(
- update_set, RECORD_ID_PREFIX).values():
- yield i
-
- def active_pids(self, pids):
- stored_pids = self.get_by_key('pids') or set()
- for pid in stored_pids:
- if pid not in pids:
- LOG.debug('Purge dead uwsgi pid %s from pids list', pid)
- self.delete_by_key('pid:%s' % pid)
-
- self.set_by_key('pids', pids)
-
- # remove unneeded updates
- min_update = self._get_update_count()
- for pid in pids:
- n = self.get_by_key('pid:%s' % pid)
- if n:
- if n < min_update:
- min_update = n
-
- first_valid_update = self.get_by_key('first_valid_update') or 0
- LOG.debug('Purge polled updates from %(first)s to %(min)s',
- {'first': first_valid_update, 'min': min_update})
-
- for delete_id_set in utils.make_range(first_valid_update, min_update,
- BULK_DELETE_SIZE):
- if not self.memcached.delete_multi(delete_id_set,
- key_prefix=UPDATE_ID_PREFIX):
- LOG.critical('Failed to delete_multi from memcached')
- raise Exception('Failed to delete_multi from memcached')
-
- self.set_by_key('first_valid_update', min_update)
-
- def _get_update_count(self):
- return self.get_by_key('update:count') or 0
-
- def _set_pids(self, pid):
- pids = self.get_by_key('pids') or set()
- if pid in pids:
- return
- pids.add(pid)
- self.set_by_key('pids', pids)
-
- def _get_record_name(self, record_id):
- return RECORD_ID_PREFIX + str(record_id)
-
- def _get_record_count(self):
- return self.get_by_key('record:count') or 0
-
- def _set_record_count(self, count):
- self.set_by_key('record:count', count)
-
- def get_all_records(self):
- for record_id_set in utils.make_range(0, self._get_record_count(),
- BULK_READ_SIZE):
- for i in self.memcached.get_multi(
- record_id_set, RECORD_ID_PREFIX).values():
- yield i
-
- def _commit_update(self, record_id):
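-        # updates form an append-only journal: update:<n> stores the id of
-        # the record changed at step n, so pollers (see get_update) replay
-        # only the records touched since their last position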
- count = self._get_update_count()
- self.set_by_key(UPDATE_ID_PREFIX + str(count), record_id)
- self.set_by_key('update:count', count + 1)
-
- def _init_user_count(self):
- if not self.get_by_key('user:count'):
- self.set_by_key('user:count', 1)
-
-
-def get_runtime_storage(uri):
- LOG.debug('Runtime storage is requested for uri %s', uri)
- match = re.search(MEMCACHED_URI_PREFIX, uri)
- if match:
- return MemcachedStorage(uri)
- else:
- raise Exception('Unknown runtime storage uri %s' % uri)
diff --git a/stackalytics/processor/schema.py b/stackalytics/processor/schema.py
deleted file mode 100644
index 51f1f0f7c..000000000
--- a/stackalytics/processor/schema.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-default_data = {
- "$schema": "http://json-schema.org/draft-04/schema#",
- "type": "object",
- "required": ["users", "releases", "companies", "repos", "project_types"],
- "properties": {
- "users": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "launchpad_id": {
- "type": "string",
- "pattern": "^[a-z\\d\\.\\+-]+$"
- },
- "github_id": {
- "type": "string"
- },
- "zanata_id": {
- "type": "string"
- },
- "user_name": {
- "type": "string"
- },
- "emails": {
- "type": "array",
- "items": {
- "type": "string",
- "pattern": ("^[a-zA-Z\\d_\\.\\+-]+@"
- "([a-z\\d\\.-]+\\.)"
- "*(([a-z]+)|\\(none\\))$")
- },
- "minItems": 1
- },
- "companies": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "company_name": {
- "type": "string"
- },
- "end_date": {
- "$ref": "#/definitions/date_format"
- }
- },
- "required": ["company_name", "end_date"],
- "additionalProperties": False
- },
- "minItems": 1
- }
- },
- "required": ["user_name", "emails"],
- "additionalProperties": False
- }
- },
- "releases": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "release_name": {
- "type": "string"
- },
- "end_date": {
- "$ref": "#/definitions/date_format"
- },
- "refs": {
- "type": "object"
- }
- },
- "required": ["release_name", "end_date"],
- "additionalProperties": False
- }
- },
- "repos": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "uri": {
- "type": "string"
- },
- "organization": {
- "type": "string"
- },
- "module": {
- "type": "string"
- },
- "releases": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "branch": {
- "type": "string"
- },
- "tag_from": {
- "type": "string"
- },
- "tag_to": {
- "type": "string"
- },
- "release_name": {
- "type": "string"
- }
- },
- "required": ["tag_from", "tag_to", "release_name"]
- }
- },
- "aliases": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "default_branch": {
- "type": "string"
- }
- },
- "required": ["uri", "module", "organization"],
- "additionalProperties": False
- }
- },
- "companies": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "company_name": {
- "type": "string"
- },
- "domains": {
- "type": "array",
- "items": {
- "type": "string",
- "pattern": "^[a-z\\d\\.-]*$"
- }
- },
- "aliases": {
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- },
- "required": ["company_name", "domains"],
- "additionalProperties": False
- }
- },
- "project_sources": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "organization": {
- "type": "string"
- },
- "uri": {
- "type": "string"
- },
- "git_base_uri": {
- "type": "string"
- },
- "ssh_key_filename": {
- "type": "string"
- },
- "ssh_username": {
- "type": "string"
- },
- "exclude": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "default_branch": {
- "type": "string"
- },
- "module_group_id": {
- "type": "string"
- }
- },
- "required": ["organization"],
- "additionalProperties": False
- }
- },
- "module_groups": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "module_group_name": {
- "type": "string",
- "pattern": "^[\\w-]+$"
- },
- "modules": {
- "type": ["array"],
- "items": {
- "type": "string"
- }
- }
- },
- "required": ["module_group_name", "modules"],
- "additionalProperties": False
- }
- },
- "mail_lists": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "member_lists": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "project_types": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "pattern": "^[\\w:-]+$"
- },
- "child": {
- "type": "boolean"
- },
- "title": {
- "type": "string"
- },
- "modules": {
- "type": ["array"],
- "items": {
- "type": "string",
- "pattern": "^[\\w:-]+$"
- }
- }
- },
- "required": ["id", "title", "modules"],
- "additionalProperties": False
- }
- }
- },
- "definitions": {
- "date_format": {
- "type": ["string", "null"],
- "pattern": ("^20\\d{2}-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|"
- "Nov|Dec)-[0-3]\\d$")
- }
- }
-}
diff --git a/stackalytics/processor/user_processor.py b/stackalytics/processor/user_processor.py
deleted file mode 100644
index 117e109ed..000000000
--- a/stackalytics/processor/user_processor.py
+++ /dev/null
@@ -1,275 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-from oslo_log import log as logging
-
-from stackalytics.processor import utils
-
-LOG = logging.getLogger(__name__)
-
-INDEPENDENT = '*independent'
-ROBOTS = '*robots'
-
-
-def make_user_id(emails=None, launchpad_id=None, gerrit_id=None,
- member_id=None, github_id=None, zanata_id=None):
- if launchpad_id or emails:
- return launchpad_id or emails[0]
- if gerrit_id:
- return 'gerrit:%s' % gerrit_id
- if member_id:
- return 'member:%s' % member_id
- if github_id:
- return 'github:%s' % github_id
- if zanata_id:
- return 'zanata:%s' % zanata_id
- return None
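-# Examples for make_user_id (illustrative):
-#   make_user_id(launchpad_id='jdoe') -> 'jdoe'
-#   make_user_id(gerrit_id='jdoe')    -> 'gerrit:jdoe'
-#   make_user_id(member_id='42')      -> 'member:42'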
-
-
-def store_user(runtime_storage_inst, user):
- if not user.get('seq'):
- user['seq'] = runtime_storage_inst.inc_user_count()
- LOG.debug('New user: %s', user)
-
- runtime_storage_inst.set_by_key('user:%d' % user['seq'], user)
- if user.get('user_id'):
- runtime_storage_inst.set_by_key('user:%s' % user['user_id'], user)
- if user.get('launchpad_id'):
- runtime_storage_inst.set_by_key('user:%s' % user['launchpad_id'], user)
- if user.get('gerrit_id'):
- runtime_storage_inst.set_by_key('user:gerrit:%s' % user['gerrit_id'],
- user)
- if user.get('github_id'):
- runtime_storage_inst.set_by_key('user:github:%s' % user['github_id'],
- user)
- if user.get('zanata_id'):
- runtime_storage_inst.set_by_key('user:zanata:%s' % user['zanata_id'],
- user)
- for email in user.get('emails') or []:
- runtime_storage_inst.set_by_key('user:%s' % email, user)
-
-
-def load_user(runtime_storage_inst, seq=None, user_id=None, email=None,
- launchpad_id=None, gerrit_id=None, member_id=None,
- github_id=None, zanata_id=None):
-
- key = make_user_id(gerrit_id=gerrit_id, member_id=member_id,
- github_id=github_id, zanata_id=zanata_id)
- if not key:
- key = seq or user_id or launchpad_id or email
- if key:
- return runtime_storage_inst.get_by_key('user:%s' % key)
- return None
-
-
-def delete_users(runtime_storage_inst, users):
- for user in users:
- LOG.debug('Delete user: %s', user)
- runtime_storage_inst.delete_by_key('user:%s' % user['seq'])
-
-
-def update_user_profile(stored_user, user):
- # update stored_user with user and return it
- if stored_user:
- updated_user = copy.deepcopy(stored_user)
- updated_user.update(user)
- updated_user['emails'] = list(set(stored_user.get('emails', [])) |
- set(user.get('emails', [])))
- else:
- updated_user = copy.deepcopy(user)
- updated_user['static'] = True
- return updated_user
-
-
-def get_company_for_date(companies, date):
- for r in companies:
- if date < r['end_date']:
- return r['company_name'], 'strict'
- return companies[-1]['company_name'], 'open' # may be overridden
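-# Example for get_company_for_date (illustrative): with companies
-# [{'company_name': 'ACME', 'end_date': 100},
-#  {'company_name': 'Initech', 'end_date': 0}], date 50 returns
-# ('ACME', 'strict') and date 150 falls through to ('Initech', 'open').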
-
-
-def get_company_by_email(domains_index, email):
- """Get company based on email domain
-
- Automatically maps email domain into company name. Prefers
- subdomains to root domains.
-
- :param domains_index: dict {domain -> company name}
- :param email: valid email. may be empty
- :return: company name or None if nothing matches
- """
- if not email:
- return None
-
- name, at, domain = email.partition('@')
- if domain:
- parts = domain.split('.')
- for i in range(len(parts), 1, -1):
- m = '.'.join(parts[len(parts) - i:])
- if m in domains_index:
- return domains_index[m]
- return None
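-# Example for get_company_by_email (illustrative): with domains_index
-# {'ibm.com': 'IBM', 'au.ibm.com': 'IBM Australia'}, 'jdoe@au.ibm.com'
-# resolves to 'IBM Australia' (the longest matching subdomain wins),
-# while 'jdoe@example.org' yields None.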
-
-
-def create_user(domains_index, launchpad_id, email, gerrit_id, zanata_id,
- user_name):
- company = get_company_by_email(domains_index, email) or INDEPENDENT
- emails = [email] if email else []
-
- user = {
- 'user_id': make_user_id(
- emails=emails, launchpad_id=launchpad_id, gerrit_id=gerrit_id,
- zanata_id=zanata_id),
- 'launchpad_id': launchpad_id,
- 'user_name': user_name or '',
- 'companies': [{
- 'company_name': company,
- 'end_date': 0,
- }],
- 'emails': emails,
- }
-
- if gerrit_id:
- user['gerrit_id'] = gerrit_id
- if zanata_id:
- user['zanata_id'] = zanata_id
-
- return user
-
-
-def update_user_affiliation(domains_index, user):
- """Update user affiliation
-
-    Affiliation is updated only if the user is currently independent
-    but contributes from a company email domain.
-
- :param domains_index: dict {domain -> company name}
- :param user: user profile
- """
- for email in user.get('emails'):
- company_name = get_company_by_email(domains_index, email)
-
- uc = user['companies']
- if (company_name and (len(uc) == 1) and
- (uc[0]['company_name'] == INDEPENDENT)):
- LOG.debug('Updating affiliation of user %s to %s',
- user['user_id'], company_name)
- uc[0]['company_name'] = company_name
- break
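-# Example for update_user_affiliation (illustrative): a user whose only
-# company record is '*independent' and who has an email at a domain found
-# in domains_index gets that company written into the open-ended record.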
-
-
-def merge_user_profiles(domains_index, user_profiles):
- """Merge user profiles into one
-
-    The function merges a list of user profiles into one and figures out
-    which profiles can be deleted.
-
- :param domains_index: dict {domain -> company name}
- :param user_profiles: user profiles to merge
- :return: tuple (merged user profile, [user profiles to delete])
- """
- LOG.debug('Merge profiles: %s', user_profiles)
-
-    # check if there is more than one launchpad_id or gerrit_id
- lp_ids = set(u.get('launchpad_id') for u in user_profiles
- if u.get('launchpad_id'))
- if len(lp_ids) > 1:
- LOG.debug('Ambiguous launchpad ids: %s on profiles: %s',
- lp_ids, user_profiles)
- g_ids = set(u.get('gerrit_id') for u in user_profiles
- if u.get('gerrit_id'))
- if len(g_ids) > 1:
- LOG.debug('Ambiguous gerrit ids: %s on profiles: %s',
- g_ids, user_profiles)
-
- merged_user = {} # merged user profile
-
- # collect ordinary fields
- for key in ['seq', 'user_name', 'user_id', 'gerrit_id', 'github_id',
- 'launchpad_id', 'companies', 'static', 'zanata_id']:
- value = next((v.get(key) for v in user_profiles if v.get(key)),
- None)
- if value:
- merged_user[key] = value
-
- # update user_id, prefer it to be equal to launchpad_id
- merged_user['user_id'] = (merged_user.get('launchpad_id') or
- merged_user.get('user_id'))
-
-    # always preserve `user_name` since it is a required field
- if 'user_name' not in merged_user:
- merged_user['user_name'] = merged_user['user_id']
-
- # merge emails
- emails = set([])
- core_in = set([])
- for u in user_profiles:
- emails |= set(u.get('emails', []))
- core_in |= set(u.get('core', []))
- merged_user['emails'] = list(emails)
- if core_in:
- merged_user['core'] = list(core_in)
-
- # merge companies
- merged_companies = merged_user['companies']
- for u in user_profiles:
- companies = u.get('companies')
- if companies:
- if (companies[0]['company_name'] != INDEPENDENT or
- len(companies) > 1):
- merged_companies = companies
- break
- merged_user['companies'] = merged_companies
-
- update_user_affiliation(domains_index, merged_user)
-
- users_to_delete = []
- seqs = set(u.get('seq') for u in user_profiles if u.get('seq'))
-
- if len(seqs) > 1:
- # profiles are merged, keep only one, remove others
- seqs.remove(merged_user['seq'])
-
- for u in user_profiles:
- if u.get('seq') in seqs:
- users_to_delete.append(u)
-
- return merged_user, users_to_delete
-
-
-def are_users_same(users):
- """True if all users are the same and not Nones"""
- x = set(u.get('seq') for u in users)
- return len(x) == 1 and None not in x
-
-
-def resolve_companies_aliases(domains_index, companies):
- norm_companies = []
-
- prev_company_name = None
- for c in reversed(companies):
- company_name = c['company_name']
- company_name = (domains_index.get(
- utils.normalize_company_name(company_name))
- or (utils.normalize_company_draft(company_name)))
-
- if company_name != prev_company_name:
- r = copy.deepcopy(c)
- r['company_name'] = company_name
- norm_companies.append(r)
-
- prev_company_name = company_name
-
- return list(reversed(norm_companies))
diff --git a/stackalytics/processor/utils.py b/stackalytics/processor/utils.py
deleted file mode 100644
index cf367ac42..000000000
--- a/stackalytics/processor/utils.py
+++ /dev/null
@@ -1,368 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import calendar
-import cgi
-import datetime
-import gzip
-import random
-import re
-import time
-
-import iso8601
-from oslo_config import cfg
-from oslo_log import log as logging
-import requests
-import requests_file
-import six
-import yaml
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-def init_config_and_logging(opts):
- CONF.register_cli_opts(opts)
- CONF.register_opts(opts)
- logging.register_options(CONF)
- logging.set_defaults()
-
- CONF(project='stackalytics')
-
- logging.setup(CONF, 'stackalytics')
- LOG.info('Logging enabled')
- CONF.log_opt_values(LOG, logging.DEBUG)
-
-
-def date_to_timestamp(d):
- if not d:
- return 0
- if d == 'now':
- return int(time.time())
- return int(time.mktime(
- datetime.datetime.strptime(d, '%Y-%b-%d').timetuple()))
-
-
-def date_to_timestamp_ext(d):
- try:
- return date_to_timestamp(d)
- except (ValueError, TypeError):
- return int(d)
-
-
-def member_date_to_timestamp(d):
- if not d:
- return 0
- return int(time.mktime(
- datetime.datetime.strptime(d, '%B %d, %Y ').timetuple()))
-
-
-def iso8601_to_timestamp(s):
- return calendar.timegm(iso8601.parse_date(s).utctimetuple())
-
-
-def timestamp_to_date(timestamp):
- return (datetime.datetime.fromtimestamp(timestamp).
- strftime('%Y-%b-%d'))
-
-
-def timestamp_to_week(timestamp):
- # Jan 4th 1970 is the first Sunday in the Epoch
- return (timestamp - 3 * 24 * 3600) // (7 * 24 * 3600)
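-# Example for timestamp_to_week: timestamp 259200 (Sunday, Jan 4th 1970)
-# maps to week 0; any earlier timestamp floors to week -1.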
-
-
-def week_to_date(week):
- timestamp = week * 7 * 24 * 3600 + 3 * 24 * 3600
- return (datetime.datetime.fromtimestamp(timestamp).
- strftime('%Y-%m-%d %H:%M:%S'))
-
-
-def timestamp_to_day(timestamp):
- return timestamp // (24 * 3600)
-
-
-def timestamp_to_utc_date(timestamp):
- return (datetime.datetime.fromtimestamp(timestamp).
- strftime('%Y-%m-%d'))
-
-
-def round_timestamp_to_day(timestamp):
- return (int(timestamp) // (24 * 3600)) * (24 * 3600)
-
-
-def check_email_validity(email):
- if email:
-        return re.match(r'[\w\d_\.\-\\+]+@([\w\d_\.-]+\.)+[\w]+', email)
- return False
-
-
-user_agents = [
- 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64) Gecko/20100101 Firefox/41.0',
- 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9',
- 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0',
- 'Mozilla/5.0 (Macintosh; Intel Mac OS X) Chrome/45.0.2062.120',
- 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko'
-]
-
-
-def _session_request(session, uri, method):
- session.mount('file://', requests_file.FileAdapter())
- user_agent = random.choice(user_agents)
-
- headers = {'User-Agent': user_agent, 'Accept': 'application/json'}
- return session.request(method, uri, headers=headers,
- timeout=CONF.read_timeout)
-
-
-def do_request(uri, method='get', session=None):
- if session:
- return _session_request(session, uri, method)
- else:
- with requests.Session() as session:
- return _session_request(session, uri, method)
-
-
-def read_uri(uri, session=None):
- try:
- return do_request(uri, session=session).text
- except Exception as e:
- LOG.warning('Error "%(error)s" retrieving uri %(uri)s',
- {'error': e, 'uri': uri})
-
-
-def read_json_from_uri(uri, session=None):
- try:
- return do_request(uri, session=session).json()
- except Exception as e:
- LOG.warning('Error "%(error)s" parsing json from uri %(uri)s',
- {'error': e, 'uri': uri})
-
-
-def read_yaml_from_uri(uri):
- try:
- return yaml.safe_load(read_uri(uri))
- except Exception as e:
- LOG.warning('Error "%(error)s" parsing yaml from uri %(uri)s',
- {'error': e, 'uri': uri})
-
-
-def _gzip_decompress(content):
- if six.PY3:
- return gzip.decompress(content).decode('utf8')
- else:
- gzip_fd = gzip.GzipFile(fileobj=six.moves.StringIO(content))
- return gzip_fd.read()
-
-
-def read_gzip_from_uri(uri):
- try:
- return _gzip_decompress(do_request(uri).content)
- except Exception as e:
- LOG.warning('Error "%(error)s" retrieving uri %(uri)s',
- {'error': e, 'uri': uri})
-
-
-def get_uri_last_modified(uri):
- try:
- return do_request(uri, method='head').headers['last-modified']
- except Exception as e:
- LOG.warning('Error "%(error)s" retrieving uri %(uri)s',
- {'error': e, 'uri': uri})
-
-
-def cmp_to_key(mycmp): # ported from python 3
- """Convert a cmp= function into a key= function."""
- class K(object):
- __slots__ = ['obj']
-
- def __init__(self, obj):
- self.obj = obj
-
- def __lt__(self, other):
- return mycmp(self.obj, other.obj) < 0
-
- def __gt__(self, other):
- return mycmp(self.obj, other.obj) > 0
-
- def __eq__(self, other):
- return mycmp(self.obj, other.obj) == 0
-
- def __le__(self, other):
- return mycmp(self.obj, other.obj) <= 0
-
- def __ge__(self, other):
- return mycmp(self.obj, other.obj) >= 0
-
- def __ne__(self, other):
- return mycmp(self.obj, other.obj) != 0
-
- __hash__ = None
- return K
-
-
-def make_range(start, stop, step):
- last_full = stop - ((stop - start) % step)
- for i in six.moves.range(start, last_full, step):
- yield six.moves.range(i, i + step)
- if stop > last_full:
- yield six.moves.range(last_full, stop)
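-# Example for make_range (illustrative): make_range(0, 10, 4) yields
-# range(0, 4), range(4, 8) and then the partial tail range(8, 10).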
-
-
-def load_repos(runtime_storage_inst):
- return runtime_storage_inst.get_by_key('repos') or []
-
-
-def unwrap_text(text):
- res = ''
- for line in text.splitlines():
- s = line.rstrip()
- if not s:
- continue
- res += line
- if (not s[0].isalpha()) or (s[-1] in ['.', '!', '?', '>', ':', ';']):
- res += '\n'
- else:
- res += ' '
- return res.rstrip()
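-# Example for unwrap_text (illustrative):
-#   unwrap_text('first line\nwrapped tail.') -> 'first line wrapped tail.'
-# A line keeps its newline when it starts with a non-letter or ends with
-# punctuation; otherwise it is joined with the next line.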
-
-
-def format_text(s):
- s = cgi.escape(re.sub(re.compile('\n{2,}', flags=re.MULTILINE), '\n', s))
-
-    def replace_dots(match_obj):
-        # insert a zero-width-space entity after each dot/slash run so long
-        # paths can wrap in HTML output (the entity choice is an assumption)
-        return re.sub(r'([\./]+)', r'\1&#8203;', match_obj.group(0))
-
- s = re.sub(r'((?:\w+[\./]+)+\w+)', replace_dots, s)
- return s
-
-
-def make_age_string(seconds):
-    days = seconds // (3600 * 24)
-    hours = (seconds // 3600) - (days * 24)
- return '%d days and %d hours' % (days, hours)
-
-
-def merge_records(original, new):
- need_update = False
- for key, value in six.iteritems(new):
- if original.get(key) != value:
- need_update = True
- original[key] = value
- return need_update
-
-
-def get_blueprint_id(module, name):
- return module + ':' + name
-
-
-def make_bug_id(bug_id, module, release=None):
- if release:
- return '/'.join([module, release, bug_id])
- else:
- return '/'.join([module, bug_id])
-
-
-def get_patch_id(review_id, patch_number):
- return '%s:%s' % (review_id, patch_number)
-
-
-def add_index(sequence, start=1, item_filter=lambda x: True):
- n = start
- for item in sequence:
- if item_filter(item):
- item['index'] = n
- n += 1
- else:
- item['index'] = ''
- return sequence
-
-
-def safe_encode(s):
- return six.moves.urllib.parse.quote(s.encode('utf-8'))
-
-
-def keep_safe_chars(s):
- return re.sub(r'[^\x21-\x7e\x80-\xff]+', '', s)
-
-
-def make_module_group(module_group_id, name=None, modules=None, tag='module'):
- return {'id': module_group_id,
- 'module_group_name': name or module_group_id,
- 'modules': modules or {module_group_id},
- 'tag': tag}
-
-BAD_NAME_SUFFIXES = ['Ltd', 'Pvt', 'Inc', 'GmbH', 'AG', 'Corporation', 'Corp',
- 'Company', 'Co', 'Group', 'Srl', 'Limited', 'LLC', 'IT']
-
-BAD_NAME_SUFFIXES_WITH_STOPS = ['S.p.A.', 's.r.o.', 'L.P.', 'B.V.', 'K.K.',
- 'd.o.o.']
-
-
-def normalize_company_name(name):
- regex = '(\\b(' + '|'.join(BAD_NAME_SUFFIXES) + ')\\b)'
- regex += '|' + '((^|\\s)(' + '|'.join(BAD_NAME_SUFFIXES_WITH_STOPS) + '))'
- name = re.sub(re.compile(regex, re.IGNORECASE), '', name)
- return ''.join([c.lower() for c in name if c.isalnum()])
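-# Examples for normalize_company_name (illustrative): 'Mirantis Inc' and
-# 'MIRANTIS' both collapse to 'mirantis', so aliases match regardless of
-# case, punctuation and legal suffixes.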
-
-
-def normalize_company_draft(name):
- name = re.sub(',', ' ', name)
- name = re.sub(r'\s+', ' ', name)
- return name
-
-
-def validate_lp_display_name(lp_profile):
- if lp_profile:
- if "" == lp_profile['display_name']:
- lp_profile['display_name'] = lp_profile['name']
-
-
-def make_pipeline_processor(processors):
-
- def get_passes(_processors):
- # every processor yields one or more record handlers
- # this function groups record handlers by pass and returns list of them
- processor_generators = [p() for p in _processors]
-
- work = True
- while work:
- work = False
- record_handlers = []
-
- for generator in processor_generators:
- try:
- record_handlers.append(next(generator))
- except StopIteration:
- pass
-
- if record_handlers:
- work = True
- yield record_handlers
-
- def pipeline_processor(record_generator):
-
- # for every pass
- for one_pass in get_passes(processors):
- # iterate every record in producer
- for record in record_generator():
- # iterate over record handlers within single pass
- for record_handler in one_pass:
- # feed record to the handler
- for r in record_handler(record) or []:
- # yield processed record
- yield r
-
- return pipeline_processor
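-# Usage sketch (illustrative): each processor is a generator that yields one
-# record handler per pass. For processors yielding [h1] and [h2a, h2b],
-# get_passes produces pass 1 = [h1, h2a] and pass 2 = [h2b]; all records
-# from the producer are replayed through the handlers of each pass in turn.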
diff --git a/stackalytics/processor/vcs.py b/stackalytics/processor/vcs.py
deleted file mode 100644
index bb98daf95..000000000
--- a/stackalytics/processor/vcs.py
+++ /dev/null
@@ -1,305 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import shutil
-
-from oslo_log import log as logging
-import sh
-import six
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-
-class Vcs(object):
- """Base object for Version Control System"""
-
- def __init__(self, repo, sources_root):
- self.repo = repo
- self.sources_root = sources_root
- if not os.path.exists(sources_root):
- os.mkdir(sources_root)
- else:
- if not os.access(sources_root, os.W_OK):
- raise Exception('Sources root folder %s is not writable' %
- sources_root)
-
- def fetch(self):
- pass
-
- def log(self, branch, head_commit_id):
- pass
-
- def get_last_id(self, branch):
- pass
-
-
-GIT_LOG_PARAMS = [
- ('commit_id', '%H'),
- ('date', '%at'),
- ('author_name', '%an'),
- ('author_email', '%ae'),
- ('subject', '%s'),
- ('message', '%b'),
-]
-GIT_LOG_FORMAT = ''.join([(r[0] + ':' + r[1] + '%n')
- for r in GIT_LOG_PARAMS]) + 'diff_stat:'
-DIFF_STAT_PATTERN = (r'[^\d]+(\d+)\s+[^\s]*\s+changed'
-                     r'(,\s+(\d+)\s+([^\d\s]*)\s+(\d+)?)?')
-GIT_LOG_PATTERN = re.compile(''.join([(r[0] + ':(.*?)\n')
- for r in GIT_LOG_PARAMS]) +
-                             'diff_stat:(?P<diff_stat>.+?)(?=commit|\Z)',
- re.DOTALL)
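-# The --shortstat summary matched by DIFF_STAT_PATTERN looks like
-# ' 3 files changed, 10 insertions(+), 2 deletions(-)' or, when only one
-# kind of change is present, ' 1 file changed, 2 deletions(-)'.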
-
-CO_AUTHOR_PATTERN_RAW = ('(?P<author_name>.*?)\s*'
-                         '(?P<author_email>[\w\.-]+@[\w\.-]+)>?')
-CO_AUTHOR_PATTERN = re.compile(CO_AUTHOR_PATTERN_RAW, re.IGNORECASE)
-
-MESSAGE_PATTERNS = {
-    'bug_id': re.compile(r'bug[\s#:]*(?P<id>\d+)', re.IGNORECASE),
-    'blueprint_id': re.compile(r'\b(?:blueprint|bp)\b[ \t]*[#:]?[ \t]*'
-                               r'(?P<id>[a-z0-9-]+)', re.IGNORECASE),
-    'change_id': re.compile('Change-Id: (?P<id>I[0-9a-f]{40})', re.IGNORECASE),
-    'coauthor': re.compile(r'(?:Co-Authored-By|Also-By|Co-Author):'
-                           r'\s*(?P<id>%s)\s' % CO_AUTHOR_PATTERN_RAW,
- re.IGNORECASE)
-}
-
-
-class Git(Vcs):
-
- def __init__(self, repo, sources_root):
- super(Git, self).__init__(repo, sources_root)
- uri = self.repo['uri']
- match = re.search(r'([^/]+)\.git$', uri)
- if match:
- self.folder = os.path.normpath(self.sources_root + '/' +
- match.group(1))
- else:
- raise Exception('Unexpected uri %s for git' % uri)
- self.release_index = {}
-
- def _checkout(self, branch):
- try:
- sh.git('clean', '-d', '--force')
- sh.git('reset', '--hard')
- sh.git('checkout', 'origin/' + branch)
- return True
- except sh.ErrorReturnCode:
- LOG.error('Unable to checkout branch %(branch)s from repo '
- '%(uri)s. Ignore it',
- {'branch': branch, 'uri': self.repo['uri']},
- exc_info=True)
- return False
-
- def fetch(self):
- LOG.debug('Fetching repo uri %s', self.repo['uri'])
-
- if os.path.exists(self.folder):
- os.chdir(self.folder)
- try:
- uri = str(
- sh.git('config', '--get', 'remote.origin.url')).strip()
- except sh.ErrorReturnCode:
- LOG.error('Unable to get config for git repo %s. Ignore it',
- self.repo['uri'], exc_info=True)
- return {}
-
- if uri != self.repo['uri']:
- LOG.warning('Repo uri %(uri)s differs from cloned %(old)s',
- {'uri': self.repo['uri'], 'old': uri})
- os.chdir('..')
- shutil.rmtree(self.folder)
-
- if not os.path.exists(self.folder):
- os.chdir(self.sources_root)
- try:
- sh.git('clone', self.repo['uri'])
- os.chdir(self.folder)
- except sh.ErrorReturnCode:
- LOG.error('Unable to clone git repo %s. Ignore it',
- self.repo['uri'], exc_info=True)
- else:
- os.chdir(self.folder)
- try:
- sh.git('fetch')
- except sh.ErrorReturnCode:
- LOG.error('Unable to fetch git repo %s. Ignore it',
- self.repo['uri'], exc_info=True)
-
- return self._get_release_index()
-
- def _get_release_index(self):
- if not os.path.exists(self.folder):
- return {}
-
- LOG.debug('Get release index for repo uri: %s', self.repo['uri'])
- os.chdir(self.folder)
- if not self.release_index:
- for release in self.repo.get('releases', []):
- release_name = release['release_name'].lower()
-
- if 'branch' in release:
- branch = release['branch']
- else:
- branch = 'master'
- if not self._checkout(branch):
- continue
-
- if 'tag_from' in release:
- tag_range = release['tag_from'] + '..' + release['tag_to']
- else:
- tag_range = release['tag_to']
-
- try:
- git_log_iterator = sh.git('log', '--pretty=%H', tag_range,
- _tty_out=False)
- for commit_id in git_log_iterator:
- self.release_index[commit_id.strip()] = release_name
- except sh.ErrorReturnCode:
- LOG.error('Unable to get log of git repo %s. Ignore it',
- self.repo['uri'], exc_info=True)
- return self.release_index
-
- def log(self, branch, head_commit_id):
- LOG.debug('Parsing git log for repo uri %s', self.repo['uri'])
-
- os.chdir(self.folder)
- if not self._checkout(branch):
- return
-
- commit_range = 'HEAD'
- if head_commit_id:
- commit_range = head_commit_id + '..HEAD'
-
- try:
- output = sh.git('log', '--pretty=' + GIT_LOG_FORMAT, '--shortstat',
- '-M', '--no-merges', commit_range, _tty_out=False,
- _decode_errors='ignore', _encoding='utf8')
- except sh.ErrorReturnCode:
- LOG.error('Unable to get log of git repo %s. Ignore it',
- self.repo['uri'], exc_info=True)
- return
-
- for rec in re.finditer(GIT_LOG_PATTERN, six.text_type(output)):
- i = 1
- commit = {}
- for param in GIT_LOG_PARAMS:
- commit[param[0]] = rec.group(i)
- i += 1
-
- # ignore machine/script produced submodule auto updates
- if commit['subject'] == u'Update git submodules':
- continue
-
- if not commit['author_email']:
- # ignore commits with empty email (there are some < Essex)
- continue
-
- commit['author_email'] = utils.keep_safe_chars(
- commit['author_email'])
-
- diff_stat_str = rec.group('diff_stat')
- diff_rec = re.search(DIFF_STAT_PATTERN, diff_stat_str)
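-            # diff_rec groups: (1) files changed, (3) the first count,
-            # (4) the 'insertions'/'deletions' token and (5) the optional
-            # second count; when only deletions are reported, group (5)
-            # is absent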
-
- if diff_rec:
- files_changed = int(diff_rec.group(1))
- lines_changed_group = diff_rec.group(2)
- lines_changed = diff_rec.group(3)
- deleted_or_inserted = diff_rec.group(4)
- lines_deleted = diff_rec.group(5)
-
-                if lines_changed_group:  # there are inserted or deleted lines
- if not lines_deleted:
- if deleted_or_inserted[0] == 'd': # deleted
- lines_deleted = lines_changed
- lines_changed = 0
- else:
- files_changed = 0
- lines_changed = 0
- lines_deleted = 0
-
- commit['files_changed'] = files_changed
- commit['lines_added'] = int(lines_changed or 0)
- commit['lines_deleted'] = int(lines_deleted or 0)
-
- for pattern_name, pattern in six.iteritems(MESSAGE_PATTERNS):
- collection = set()
- for item in re.finditer(pattern, commit['message']):
- collection.add(item.group('id'))
- if collection:
- commit[pattern_name] = list(collection)
-
- commit['date'] = int(commit['date'])
- commit['module'] = self.repo['module']
- commit['branches'] = set([branch])
- if commit['commit_id'] in self.release_index:
- commit['release'] = self.release_index[commit['commit_id']]
- else:
- commit['release'] = None
-
- if commit['release'] == 'ignored':
- # drop commits that are marked by 'ignored' release
- continue
-
- if 'blueprint_id' in commit:
- commit['blueprint_id'] = [(commit['module'] + ':' + bp_name)
- for bp_name
- in commit['blueprint_id']]
-
- if 'coauthor' in commit:
- verified_coauthors = []
- for coauthor in commit['coauthor']:
- m = re.match(CO_AUTHOR_PATTERN, coauthor)
- if m and utils.check_email_validity(
- m.group("author_email")):
- verified_coauthors.append(m.groupdict())
-
- if verified_coauthors:
- commit['coauthor'] = verified_coauthors
- else:
- del commit['coauthor'] # no valid authors
-
- yield commit
-
- def get_last_id(self, branch):
- LOG.debug('Get head commit for repo uri: %s', self.repo['uri'])
-
- os.chdir(self.folder)
- if not self._checkout(branch):
- return None
-
- try:
- return str(sh.git('rev-parse', 'HEAD')).strip()
- except sh.ErrorReturnCode:
- LOG.error('Unable to get HEAD for git repo %s. Ignore it',
- self.repo['uri'], exc_info=True)
-
- return None
-
-
-def get_vcs(repo, sources_root):
- uri = repo['uri']
- LOG.debug('Factory is asked for VCS uri: %s', uri)
- match = re.search(r'\.git$', uri)
- if match:
- return Git(repo, sources_root)
- else:
- LOG.warning('Unsupported VCS, fallback to dummy')
- return Vcs(repo, uri)
diff --git a/stackalytics/processor/zanata.py b/stackalytics/processor/zanata.py
deleted file mode 100644
index 270f9db08..000000000
--- a/stackalytics/processor/zanata.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2016 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import time
-
-import itertools
-from oslo_log import log as logging
-import requests
-
-from stackalytics.processor import utils
-
-
-LOG = logging.getLogger(__name__)
-
-DAY = 24 * 60 * 60
-WEEK = 7 * DAY
-
-ZANATA_URI = 'https://translate.openstack.org/rest/%s'
-ZANATA_FIRST_RECORD = '2015-08-31' # must be Monday
-
-zanata_session = requests.Session()
-
-
-def _zanata_get_user_stats(zanata_user_id, start_date, end_date):
- uri = ZANATA_URI % ('stats/user/%s/%s..%s' % (zanata_user_id,
- start_date, end_date))
- return utils.read_json_from_uri(uri, session=zanata_session)
-
-
-def _timestamp_to_date(timestamp):
- return datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')
-
-
-def _date_to_timestamp(d):
- return int(time.mktime(
- datetime.datetime.strptime(d, '%Y-%m-%d').timetuple()))
-
-
-def log(runtime_storage_inst, translation_team_uri):
-
- last_update_key = 'zanata:last_update'
- last_update = int(runtime_storage_inst.get_by_key(last_update_key) or
- _date_to_timestamp(ZANATA_FIRST_RECORD))
- LOG.info('Last update: %d', last_update)
- now = int(time.time())
-
- LOG.info('Reading translation team from uri: %s', translation_team_uri)
- translation_team = utils.read_yaml_from_uri(translation_team_uri)
-
- if not translation_team:
- LOG.warning('Translation team data is not available')
- return
-
- user_ids = set(u['zanata_id'] for u in runtime_storage_inst.get_all_users()
- if 'zanata_id' in u)
- user_ids |= set(itertools.chain.from_iterable(
- team.get('translators', []) for team in translation_team.values()))
-
- for user_id in user_ids:
- for day in range(last_update, now, WEEK):
- day_str = _timestamp_to_date(day)
- end_str = _timestamp_to_date(day + WEEK - DAY)
- user_stats = _zanata_get_user_stats(user_id, day_str, end_str)
- if user_stats:
- for user_stats_item in user_stats:
- # Currently we only count translated words
- if user_stats_item['savedState'] == 'Translated':
- record = dict(
- zanata_id=user_id,
- date=_date_to_timestamp(
- user_stats_item['savedDate']),
- language_code=user_stats_item['localeId'],
- language=user_stats_item['localeDisplayName'],
-                            # TODO: not always consistent with the official name
- module=user_stats_item['projectSlug'],
- # Since Zanata does not support '/' character
- # in project version names, i18n uses '-' instead
- # of '/' for branch names.
- branch=user_stats_item['versionSlug'].replace(
- '-', '/'),
- translated=user_stats_item['wordCount'],
- )
- yield record
- last_update += (now - last_update) // WEEK * WEEK
- LOG.info('New last update: %d', last_update)
- runtime_storage_inst.set_by_key(last_update_key, last_update)
diff --git a/stackalytics/tests/__init__.py b/stackalytics/tests/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/stackalytics/tests/api/__init__.py b/stackalytics/tests/api/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/stackalytics/tests/api/test_api.py b/stackalytics/tests/api/test_api.py
deleted file mode 100644
index 907fd4ffc..000000000
--- a/stackalytics/tests/api/test_api.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import contextlib
-import itertools
-import json
-
-import mock
-from oslo_utils import uuidutils
-import six
-import testtools
-
-from stackalytics.dashboard import web
-from stackalytics.processor import runtime_storage
-
-
-class TestAPI(testtools.TestCase):
-
- def setUp(self):
- super(TestAPI, self).setUp()
- self.app = web.app.test_client()
-
-
-@contextlib.contextmanager
-def make_runtime_storage(data, *generators):
- _add_generated_records(data, *generators)
-
- runtime_storage_inst = TestStorage(data)
- setattr(web.app, 'stackalytics_vault', None)
-
- with mock.patch('stackalytics.processor.runtime_storage.'
- 'get_runtime_storage') as get_runtime_storage_mock:
- get_runtime_storage_mock.return_value = runtime_storage_inst
- try:
- yield runtime_storage_inst
- finally:
- pass
-
-
-def make_records(**kwargs):
- GENERATORS = {
- 'commit': _generate_commits,
- 'mark': _generate_marks,
- 'review': _generate_review,
- }
-
- def generate_records():
- for record_type in kwargs.get('record_type', []):
- if record_type in GENERATORS.keys():
- for values in algebraic_product(**kwargs):
- record = next(GENERATORS[record_type]())
- record.update(values)
- yield record
-
- return generate_records
-
-
-def make_module(module_name):
- return {'id': module_name,
- 'module_group_name': module_name,
- 'modules': [module_name],
- 'tag': 'module'}
-
-
-class TestStorage(runtime_storage.RuntimeStorage):
-
- def __init__(self, data):
- super(TestStorage, self).__init__('test://')
- self.data = data
-
- def get_update(self, pid):
- for record in self.get_all_records():
- yield record
-
- def get_by_key(self, key):
- return self.data.get(key)
-
- def set_by_key(self, key, value):
- super(TestStorage, self).set_by_key(key, value)
-
- def get_all_records(self):
- for n in range(self.get_by_key('record:count') or 0):
- record = self.get_by_key('record:%s' % n)
- if record:
- yield record
-
-
-def _generate_commits():
- commit = {
- 'commit_id': uuidutils.generate_uuid(),
- 'lines_added': 9, 'module': 'nova', 'record_type': 'commit',
- 'message': 'Closes bug 1212953\n\nChange-Id: '
- 'I33f0f37b6460dc494abf2520dc109c9893ace9e6\n',
- 'subject': 'Fixed affiliation of Edgar and Sumit', 'loc': 10,
- 'user_id': 'john_doe',
- 'primary_key': uuidutils.generate_uuid(),
- 'author_email': 'john_doe@ibm.com', 'company_name': 'IBM',
- 'lines_deleted': 1, 'week': 2275,
- 'blueprint_id': None, 'bug_id': u'1212953',
- 'files_changed': 1, 'author_name': u'John Doe',
- 'date': 1376737923, 'launchpad_id': u'john_doe',
- 'branches': set([u'master']),
- 'change_id': u'I33f0f37b6460dc494abf2520dc109c9893ace9e6',
- 'release': u'icehouse'
- }
- yield commit
-
-
-def _generate_marks():
- mark = {
- 'launchpad_id': 'john_doe', 'week': 2294, 'user_id': 'john_doe',
- 'description': 'Approved', 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'primary_key': uuidutils.generate_uuid() + 'Workflow',
- 'module': 'glance', 'patch': 2, 'record_type': 'mark',
- 'company_name': '*independent', 'branch': 'master',
- 'date': 1387860458, 'record_id': 37184, 'release': 'icehouse',
- 'value': 1, 'type': 'Workflow',
- 'review_id': uuidutils.generate_uuid()}
- yield mark
-
-
-def _generate_review():
- yield {
- 'status': 'NEW', 'review_number': 6, 'number': '60721',
- 'module': 'glance', 'topic': 'bug/1258999', 'record_type': 'review',
- 'value': -2, 'open': True,
- 'id': uuidutils.generate_uuid(),
- 'subject': 'Adding missing copy_from policy from policy.json',
- 'user_id': 'john_doe',
- 'primary_key': 'Ibc0d1fa7626629c28c514514a985a6b89db2ac69',
- 'author_email': 'john_doe@gmail.com', 'company_name': '*independent',
- 'branch': 'master',
- 'launchpad_id': 'john_doe', 'lastUpdated': 1387865203,
- 'author_name': 'John Doe', 'date': 1386547707,
- 'url': 'https://review.openstack.org/60721',
- 'sortKey': '0029f92e0000ed31', 'project': 'openstack/glance',
- 'week': 2292, 'release': 'icehouse', 'updated_on': 1387865147
- }
-
-
-def _add_generated_records(data, *generators):
- count = 0
- for gen in generators:
- for record in gen():
- record['record_id'] = count
- data['record:%s' % count] = record
- count += 1
- data['record:count'] = count
-
-
-def algebraic_product(**kwargs):
- position_to_key = {}
- values = []
- for key, value in six.iteritems(kwargs):
- position_to_key[len(values)] = key
- values.append(value)
-
- for chain in itertools.product(*values):
- result = {}
- for position, key in six.iteritems(position_to_key):
- result[key] = chain[position]
- yield result
-
-
-def load_json(api_response):
- return json.loads(api_response.data.decode('utf8'))
diff --git a/stackalytics/tests/api/test_companies.py b/stackalytics/tests/api/test_companies.py
deleted file mode 100644
index 6a0579786..000000000
--- a/stackalytics/tests/api/test_companies.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from stackalytics.tests.api import test_api
-
-
-class TestAPICompanies(test_api.TestAPI):
-
- def test_get_companies(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module': 'glance', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/glance.git'}
- ],
- 'project_types': [
- {'id': 'openstack', 'title': 'OpenStack',
- 'modules': ['nova', 'glance']}],
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'module_groups': {
- 'openstack': {'module_group_name': 'openstack',
- 'modules': ['nova', 'glance']},
- 'nova': {'module_group_name': 'nova',
- 'modules': ['nova']},
- 'glance': {'module_group_name': 'glance',
- 'modules': ['glance']},
- }},
- test_api.make_records(record_type=['commit'],
- loc=[10, 20, 30],
- module=['glance'],
- company_name=['NEC', 'IBM', 'NTT']),
- test_api.make_records(record_type=['review'],
- primary_key=['0123456789', '9876543210'],
- module=['glance'],
- company_name=['IBM']),
- test_api.make_records(record_type=['mark'],
- review_id=['0123456789', '9876543210'],
- module=['glance'],
- company_name=['IBM']),
- test_api.make_records(record_type=['mark'],
- review_id=['0123456789'],
- module=['glance'],
- company_name=['NEC'])):
-
- response = self.app.get('/api/1.0/companies?metric=commits&'
- 'module=glance')
- companies = test_api.load_json(response)['data']
- self.assertEqual([{'id': 'ibm', 'text': 'IBM'},
- {'id': 'nec', 'text': 'NEC'},
- {'id': 'ntt', 'text': 'NTT'}], companies)
-
- response = self.app.get('/api/1.0/companies?metric=marks&'
- 'module=glance')
- companies = test_api.load_json(response)['data']
- self.assertEqual([{'id': 'ibm', 'text': 'IBM'},
- {'id': 'nec', 'text': 'NEC'}], companies)
-
- def test_get_company(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module': 'glance', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/glance.git'}
- ],
- 'module_groups': {
- 'nova': test_api.make_module('nova'),
- 'glance': test_api.make_module('glance'),
- },
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'project_types': [
- {'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance', 'nova-cli']},
- {'id': 'openstack', 'title': 'OpenStack',
- 'modules': ['nova', 'glance']}]},
- test_api.make_records(record_type=['commit'],
- loc=[10, 20, 30],
- module=['glance'],
- company_name=['NEC', 'IBM', 'NTT'])):
-
- response = self.app.get('/api/1.0/companies/nec?module=glance')
- company = test_api.load_json(response)['company']
- self.assertEqual({'id': 'nec', 'text': 'NEC'}, company)
-
- response = self.app.get('/api/1.0/companies/google?module=glance')
- self.assertEqual(404, response.status_code)
diff --git a/stackalytics/tests/api/test_modules.py b/stackalytics/tests/api/test_modules.py
deleted file mode 100644
index e4ad64b59..000000000
--- a/stackalytics/tests/api/test_modules.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from stackalytics.tests.api import test_api
-
-
-class TestAPIModules(test_api.TestAPI):
-
- def test_get_modules(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module': 'glance', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/glance.git'}
- ],
- 'module_groups': {
- 'nova-group': {'id': 'nova-group',
- 'module_group_name': 'nova-group',
- 'modules': ['nova', 'nova-cli'],
- 'tag': 'group'},
- 'nova': test_api.make_module('nova'),
- 'nova-cli': test_api.make_module('nova-cli'),
- 'glance': test_api.make_module('glance'),
- },
- 'releases': [
- {'release_name': 'prehistory', 'end_date': 1234567890},
- {'release_name': 'icehouse', 'end_date': 1234567890}],
- 'project_types': [{'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance',
- 'nova-cli']},
- {'id': 'integrated',
- 'title': 'Integrated',
- 'modules': ['nova', 'glance']}]},
- test_api.make_records(record_type=['commit'],
- module=['glance', 'nova', 'nova-cli'])):
-
- response = self.app.get('/api/1.0/modules?'
- 'project_type=all&metric=commits')
- modules = test_api.load_json(response)['data']
- self.assertEqual(
- [{'id': 'glance', 'text': 'glance', 'tag': 'module'},
- {'id': 'nova', 'text': 'nova', 'tag': 'module'},
- {'id': 'nova-cli', 'text': 'nova-cli', 'tag': 'module'},
- {'id': 'nova-group', 'text': 'nova-group', 'tag': 'group'}],
- modules,
- message='Expected modules belonging to the project type '
- 'plus module groups that fall completely within '
- 'the project type')
-
- response = self.app.get('/api/1.0/modules?module=nova-group&'
- 'project_type=integrated&metric=commits')
- modules = test_api.load_json(response)['data']
- self.assertEqual(
- [{'id': 'glance', 'text': 'glance', 'tag': 'module'},
- {'id': 'nova', 'text': 'nova', 'tag': 'module'},
- {'id': 'nova-group', 'text': 'nova-group', 'tag': 'group'}],
- modules,
- message='Expected modules belonging to the project type '
- 'plus module groups that fall completely within '
- 'the project type')
-
- def test_get_module(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'}],
- 'module_groups': {
- 'nova-group': {'id': 'nova-group',
- 'module_group_name': 'nova-group',
- 'modules': ['nova-cli', 'nova'],
- 'tag': 'group'},
- 'nova': test_api.make_module('nova'),
- 'nova-cli': test_api.make_module('nova-cli'),
- },
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'project_types': [
- {'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance', 'nova-cli']},
- {'id': 'openstack', 'title': 'OpenStack',
- 'modules': ['nova', 'glance']}]},
- test_api.make_records(record_type=['commit'])):
-
- response = self.app.get('/api/1.0/modules/nova')
- module = test_api.load_json(response)['module']
- self.assertEqual(
- {'id': 'nova',
- 'modules': [
- {'module_name': 'nova',
- 'visible': True,
- 'repo_uri': 'git://git.openstack.org/openstack/nova.git'}
- ],
- 'name': 'Nova', 'tag': 'module'}, module)
-
- response = self.app.get('/api/1.0/modules/nova-group')
- module = test_api.load_json(response)['module']
- self.assertEqual(
- {'id': 'nova-group',
- 'modules': [{
- 'module_name': 'nova',
- 'visible': True,
- 'repo_uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module_name': 'nova-cli', 'visible': False},
- ],
- 'name': 'Nova-group', 'tag': 'group'}, module)
diff --git a/stackalytics/tests/api/test_releases.py b/stackalytics/tests/api/test_releases.py
deleted file mode 100644
index 72374411f..000000000
--- a/stackalytics/tests/api/test_releases.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from stackalytics.tests.api import test_api
-
-
-class TestAPIReleases(test_api.TestAPI):
-
- def test_releases(self):
- with test_api.make_runtime_storage(
- {'releases': [
- {'release_name': 'prehistory', 'end_date': 1365033600},
- {'release_name': 'havana', 'end_date': 1381968000},
- {'release_name': 'icehouse', 'end_date': 1397692800}],
- 'project_types': [
- {'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance', 'nova-cli']},
- {'id': 'openstack', 'title': 'OpenStack',
- 'modules': ['nova', 'glance']}]},
- test_api.make_records(record_type=['commit'])):
- response = self.app.get('/api/1.0/releases')
- releases = test_api.load_json(response)['data']
- self.assertEqual(3, len(releases))
- self.assertIn({'id': 'all', 'text': 'All'}, releases)
- self.assertIn({'id': 'icehouse', 'text': 'Icehouse'}, releases)
diff --git a/stackalytics/tests/api/test_stats.py b/stackalytics/tests/api/test_stats.py
deleted file mode 100644
index 9be761662..000000000
--- a/stackalytics/tests/api/test_stats.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from stackalytics.tests.api import test_api
-
-
-class TestAPIStats(test_api.TestAPI):
-
- def test_get_modules(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module': 'glance', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/glance.git'}
- ],
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'module_groups': {
- 'openstack': {'id': 'openstack',
- 'module_group_name': 'openstack',
- 'modules': ['nova', 'glance'],
- 'tag': 'group'},
- 'nova': test_api.make_module('nova'),
- 'glance': test_api.make_module('glance'),
- },
- 'project_types': [
- {'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance']}]},
- test_api.make_records(record_type=['commit'],
- loc=[10, 20, 30],
- module=['nova']),
- test_api.make_records(record_type=['commit'],
- loc=[100, 200, 300],
- module=['glance'])):
- response = self.app.get('/api/1.0/stats/modules?metric=loc&'
- 'project_type=all')
- stats = test_api.load_json(response)['stats']
- self.assertEqual(2, len(stats))
- self.assertEqual(600, stats[0]['metric'])
- self.assertEqual('glance', stats[0]['id'])
- self.assertEqual(60, stats[1]['metric'])
- self.assertEqual('nova', stats[1]['id'])
-
- def test_get_engineers(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module': 'glance', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/glance.git'}
- ],
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'module_groups': {
- 'openstack': {'id': 'openstack',
- 'module_group_name': 'openstack',
- 'modules': ['nova', 'glance'],
- 'tag': 'group'},
- 'nova': test_api.make_module('nova'),
- 'glance': test_api.make_module('glance'),
- },
- 'project_types': [
- {'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance']}],
- 'user:john_doe': {
- 'seq': 1, 'user_id': 'john_doe',
- 'user_name': 'John Doe',
- 'companies': [{'company_name': 'NEC', 'end_date': 0}],
- 'emails': ['john_doe@gmail.com'], 'core': []},
- 'user:bill': {
- 'seq': 1, 'user_id': 'bill', 'user_name': 'Bill Smith',
- 'companies': [{'company_name': 'IBM', 'end_date': 0}],
- 'emails': ['bill_smith@gmail.com'], 'core': []}},
- test_api.make_records(record_type=['commit'],
- loc=[10, 20, 30],
- module=['nova'],
- user_id=['john_doe']),
- test_api.make_records(record_type=['commit'],
- loc=[100, 200, 300],
- module=['glance'],
- user_id=['john_doe']),
- test_api.make_records(record_type=['review'],
- primary_key=['0123456789'],
- module=['glance']),
- test_api.make_records(record_type=['mark'],
- review_id=['0123456789'],
- module=['glance'],
- user_id=['john_doe', 'bill'])):
- response = self.app.get('/api/1.0/stats/engineers?metric=loc&'
- 'project_type=all')
- stats = test_api.load_json(response)['stats']
- self.assertEqual(1, len(stats))
- self.assertEqual(660, stats[0]['metric'])
-
- def test_get_engineers_extended(self):
- with test_api.make_runtime_storage(
- {
- 'repos': [
- {'module': 'nova', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'},
- {'module': 'glance', 'project_type': 'openstack',
- 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/glance.git'}
- ],
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'module_groups': {
- 'openstack': {'id': 'openstack',
- 'module_group_name': 'openstack',
- 'modules': ['nova', 'glance'],
- 'tag': 'group'},
- 'nova': test_api.make_module('nova'),
- 'glance': test_api.make_module('glance'),
- },
- 'project_types': [
- {'id': 'all', 'title': 'All',
- 'modules': ['nova', 'glance']}],
- 'user:john_doe': {
- 'seq': 1, 'user_id': 'john_doe',
- 'user_name': 'John Doe',
- 'companies': [{'company_name': 'NEC', 'end_date': 0}],
- 'emails': ['john_doe@gmail.com'], 'core': []},
- 'user:smith': {
- 'seq': 1, 'user_id': 'smith',
- 'user_name': 'Bill Smith',
- 'companies': [{'company_name': 'IBM', 'end_date': 0}],
- 'emails': ['bill_smith@gmail.com'], 'core': []}},
- test_api.make_records(record_type=['commit'],
- loc=[10, 20, 30],
- module=['nova'],
- user_id=['john_doe']),
- test_api.make_records(record_type=['review'],
- primary_key=['0123456789', '9876543210'],
- module=['glance']),
- test_api.make_records(record_type=['mark'],
- review_id=['0123456789', '9876543210'],
- module=['glance'],
- value=[1],
- type=['Code-Review'],
- author_name=['John Doe'],
- user_id=['john_doe']),
- test_api.make_records(record_type=['mark'],
- review_id=['0123456789'],
- module=['glance'],
- author_name=['Bill Smith'],
- user_id=['smith'])):
- response = self.app.get('/api/1.0/stats/engineers_extended?'
- 'project_type=all')
- stats = test_api.load_json(response)['stats']
- self.assertEqual(2, len(stats))
- self.assertEqual(2, stats[0]['mark'])
- self.assertEqual('john_doe', stats[0]['id'])
- self.assertEqual(3, stats[0]['commit'])
- self.assertEqual(2, stats[0]['1'])
diff --git a/stackalytics/tests/api/test_users.py b/stackalytics/tests/api/test_users.py
deleted file mode 100644
index b14121c7e..000000000
--- a/stackalytics/tests/api/test_users.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from stackalytics.tests.api import test_api
-
-
-class TestAPIUsers(test_api.TestAPI):
-
- def test_users(self):
- with test_api.make_runtime_storage(
- {'repos': [
- {'module': 'nova', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'}],
- 'project_types': [
- {'id': 'openstack', 'title': 'openstack',
- 'modules': ['nova', 'glance']}],
- 'releases': [{'release_name': 'prehistory',
- 'end_date': 1234567890},
- {'release_name': 'icehouse',
- 'end_date': 1234567890}],
- 'module_groups': {
- 'nova': test_api.make_module('nova'),
- 'glance': test_api.make_module('glance')},
- 'user:john_doe': {'user_name': 'John Doe'},
- 'user:bill_smith': {'user_name': 'Bill Smith'}},
- test_api.make_records(record_type=['commit'], module=['nova'],
- user_id=['john_doe', 'bill_smith'])):
- response = self.app.get('/api/1.0/users?'
- 'module=nova&metric=commits')
- users = test_api.load_json(response)['data']
- self.assertEqual(2, len(users))
- self.assertIn({'id': 'john_doe', 'text': 'John Doe'}, users)
- self.assertIn({'id': 'bill_smith', 'text': 'Bill Smith'}, users)
-
- def test_user_details(self):
- with test_api.make_runtime_storage(
- {'user:john_doe': {
- 'seq': 1, 'user_id': 'john_doe', 'user_name': 'John Doe',
- 'companies': [{'company_name': 'NEC', 'end_date': 0}],
- 'emails': 'john_doe@gmail.com'}},
- test_api.make_records(record_type=['commit'], module=['nova'],
- user_name=['John Doe', 'Bill Smith'])):
- response = self.app.get('/api/1.0/users/john_doe')
- user = test_api.load_json(response)['user']
- self.assertEqual('john_doe', user['user_id'])
-
- def test_user_not_found(self):
- with test_api.make_runtime_storage(
- {'user:john_doe': {
- 'seq': 1, 'user_id': 'john_doe', 'user_name': 'John Doe',
- 'companies': [{'company_name': 'NEC', 'end_date': 0}],
- 'emails': 'john_doe@gmail.com'},
- 'repos': [
- {'module': 'nova', 'organization': 'openstack',
- 'uri': 'git://git.openstack.org/openstack/nova.git'}],
- 'module_groups': {'openstack': {
- 'module_group_name': 'openstack',
- 'modules': ['nova']}}},
- test_api.make_records(record_type=['commit'], module=['nova'],
- user_name=['John Doe', 'Bill Smith'])):
- response = self.app.get('/api/1.0/users/nonexistent')
- self.assertEqual(404, response.status_code)
diff --git a/stackalytics/tests/unit/__init__.py b/stackalytics/tests/unit/__init__.py
deleted file mode 100644
index c9d84b54b..000000000
--- a/stackalytics/tests/unit/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__author__ = 'ishakhat'
diff --git a/stackalytics/tests/unit/test_bps.py b/stackalytics/tests/unit/test_bps.py
deleted file mode 100644
index df58586a2..000000000
--- a/stackalytics/tests/unit/test_bps.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# Copyright (c) 2015 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import mock
-import testtools
-
-from stackalytics.processor import bps
-
-
-BUG = json.loads("""
-{
- "date_closed": "2015-06-02T17:31:05.820479+00:00",
- "date_assigned": "2015-06-02T17:31:44.957976+00:00",
- "title": "Bug #1458945 in Sahara: \\\"Use graduated oslo.policy\\\"",
- "bug_link": "https://api.launchpad.net/devel/bugs/1458945",
- "bug_watch_link": null,
- "milestone_link": null,
- "date_left_closed": null,
- "date_fix_committed": "2015-06-02T17:31:05.820479+00:00",
- "date_fix_released": "2015-06-02T17:31:05.820479+00:00",
- "date_in_progress": "2015-06-02T17:31:05.820479+00:00",
- "resource_type_link": "https://api.launchpad.net/devel/#bug_task",
- "status": "Fix Released",
- "bug_target_name": "sahara",
- "importance": "Medium",
- "assignee_link": "https://api.launchpad.net/devel/~slukjanov",
- "date_triaged": "2015-06-02T17:31:05.820479+00:00",
- "self_link": "https://api.launchpad.net/devel/sahara/+bug/1458945",
- "target_link": "https://api.launchpad.net/devel/sahara",
- "bug_target_display_name": "Sahara",
- "related_tasks_collection_link":
- "https://api.launchpad.net/devel/sahara/+bug/1458945/related_tasks",
- "date_confirmed": "2015-06-02T17:31:05.820479+00:00",
- "date_left_new": "2015-06-02T17:31:05.820479+00:00",
- "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
- "owner_link": "https://api.launchpad.net/devel/~samueldmq",
- "date_created": "2015-06-02T13:35:54.101235+00:00",
- "date_incomplete": null,
- "is_complete": true
-}
-""")
-
-ANOTHER_MILESTONE_BUG = json.loads("""
-{
- "date_closed": "2015-06-02T17:31:05.820479+00:00",
- "date_assigned": "2015-06-02T17:31:44.957976+00:00",
- "title": "Bug #1458945 in Sahara Kilo: \\\"Use graduated oslo.policy\\\"",
- "bug_link": "https://api.launchpad.net/devel/bugs/1458945",
- "bug_watch_link": null,
- "milestone_link": null,
- "date_left_closed": null,
- "date_fix_committed": "2015-06-02T17:31:05.820479+00:00",
- "date_fix_released": "2015-06-02T17:31:05.820479+00:00",
- "date_in_progress": "2015-06-02T17:31:05.820479+00:00",
- "resource_type_link": "https://api.launchpad.net/devel/#bug_task",
- "status": "Fix Released",
- "bug_target_name": "sahara/kilo",
- "importance": "Medium",
- "assignee_link": "https://api.launchpad.net/devel/~slukjanov",
- "date_triaged": "2015-06-02T17:31:05.820479+00:00",
- "self_link": "https://api.launchpad.net/devel/sahara/kilo/+bug/1458945",
- "target_link": "https://api.launchpad.net/devel/sahara/kilo",
- "bug_target_display_name": "Sahara Kilo",
- "related_tasks_collection_link":
- "https://api.launchpad.net/devel/sahara/kilo/+bug/1458945/related_tasks",
- "date_confirmed": "2015-06-02T17:31:05.820479+00:00",
- "date_left_new": "2015-06-02T17:31:05.820479+00:00",
- "web_link": "https://bugs.launchpad.net/sahara/kilo/+bug/1458945",
- "owner_link": "https://api.launchpad.net/devel/~samueldmq",
- "date_created": "2015-06-02T13:35:54.101235+00:00",
- "date_incomplete": null,
- "is_complete": true
-}
-""")
-
-LINKED_BUG = json.loads("""
-{
- "date_closed": "2015-06-24T20:59:57.982386+00:00",
- "date_assigned": "2015-06-18T06:46:03.741208+00:00",
- "title": "Bug #1458945 in Barbican: \\\"Use graduated oslo.policy\\\"",
- "bug_link": "https://api.launchpad.net/devel/bugs/1458945",
- "bug_watch_link": null,
- "milestone_link":
- "https://api.launchpad.net/devel/barbican/+milestone/liberty-1",
- "date_left_closed": null,
- "date_fix_committed": "2015-06-18T06:45:39.997949+00:00",
- "date_fix_released": "2015-06-24T20:59:57.982386+00:00",
- "date_in_progress": "2015-06-18T06:45:39.997949+00:00",
- "resource_type_link": "https://api.launchpad.net/devel/#bug_task",
- "status": "Fix Released",
- "bug_target_name": "barbican",
- "importance": "Medium",
- "assignee_link": "https://api.launchpad.net/devel/~juan-osorio-robles",
- "date_triaged": "2015-06-18T06:45:39.997949+00:00",
- "self_link": "https://api.launchpad.net/devel/barbican/+bug/1458945",
- "target_link": "https://api.launchpad.net/devel/barbican",
- "bug_target_display_name": "Barbican",
- "related_tasks_collection_link":
- "https://api.launchpad.net/devel/barbican/+bug/1458945/related_tasks",
- "date_confirmed": "2015-06-18T06:45:39.997949+00:00",
- "date_left_new": "2015-06-18T06:45:39.997949+00:00",
- "web_link": "https://bugs.launchpad.net/barbican/+bug/1458945",
- "owner_link": "https://api.launchpad.net/devel/~samueldmq",
- "date_created": "2015-05-26T17:47:32.438795+00:00",
- "date_incomplete": null,
- "is_complete": true
-}
-""")
-
-RELEASED_NOT_COMMITTED_BUG = json.loads("""
-{
- "date_closed": "2015-06-02T17:31:05.820479+00:00",
- "date_assigned": "2015-06-02T17:31:44.957976+00:00",
- "title": "Bug #1458945 in Sahara: \\\"Use graduated oslo.policy\\\"",
- "bug_link": "https://api.launchpad.net/devel/bugs/1458945",
- "bug_watch_link": null,
- "milestone_link": null,
- "date_left_closed": null,
- "date_fix_committed": null,
- "date_fix_released": "2015-06-02T17:31:05.820479+00:00",
- "date_in_progress": "2015-06-02T17:31:05.820479+00:00",
- "resource_type_link": "https://api.launchpad.net/devel/#bug_task",
- "status": "Fix Released",
- "bug_target_name": "sahara",
- "importance": "Medium",
- "assignee_link": "https://api.launchpad.net/devel/~slukjanov",
- "date_triaged": "2015-06-02T17:31:05.820479+00:00",
- "self_link": "https://api.launchpad.net/devel/sahara/+bug/1458945",
- "target_link": "https://api.launchpad.net/devel/sahara",
- "bug_target_display_name": "Sahara",
- "related_tasks_collection_link":
- "https://api.launchpad.net/devel/sahara/+bug/1458945/related_tasks",
- "date_confirmed": "2015-06-02T17:31:05.820479+00:00",
- "date_left_new": "2015-06-02T17:31:05.820479+00:00",
- "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
- "owner_link": "https://api.launchpad.net/devel/~samueldmq",
- "date_created": "2015-06-02T13:35:54.101235+00:00",
- "date_incomplete": null,
- "is_complete": true
-}
-""")
-
-
-class TestBps(testtools.TestCase):
- def setUp(self):
- super(TestBps, self).setUp()
- p_module_exists = mock.patch(
- 'stackalytics.processor.launchpad_utils.lp_module_exists')
- m_module_exists = p_module_exists.start()
- m_module_exists.return_value = True
- self.addCleanup(p_module_exists.stop)  # stop the patch after each test
-
- @mock.patch('stackalytics.processor.launchpad_utils.lp_bug_generator')
- def test_log(self, lp_bug_generator):
- repo = {
- 'module': 'sahara'
- }
- modified_since = 1234567890
- lp_bug_generator.return_value = iter([BUG])
-
- expected = [{
- 'assignee': 'slukjanov',
- 'date_created': 1433252154,
- 'date_fix_committed': 1433266265,
- 'date_fix_released': 1433266265,
- 'id': 'sahara/1458945',
- 'importance': 'Medium',
- 'module': 'sahara',
- 'owner': 'samueldmq',
- 'status': 'Fix Released',
- 'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
- 'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945'
- }]
-
- actual = list(bps.log(repo, modified_since))
-
- self.assertEqual(expected, actual)
-
- @mock.patch('stackalytics.processor.launchpad_utils.lp_bug_generator')
- def test_log_released_not_committed(self, lp_bug_generator):
- repo = {
- 'module': 'sahara'
- }
- modified_since = 1234567890
- lp_bug_generator.return_value = iter([RELEASED_NOT_COMMITTED_BUG])
-
- expected = [{
- 'assignee': 'slukjanov',
- 'date_created': 1433252154,
- 'date_fix_released': 1433266265,
- 'id': 'sahara/1458945',
- 'importance': 'Medium',
- 'module': 'sahara',
- 'owner': 'samueldmq',
- 'status': 'Fix Released',
- 'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
- 'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945'
- }]
-
- actual = list(bps.log(repo, modified_since))
-
- self.assertEqual(expected, actual)
-
- @mock.patch('stackalytics.processor.launchpad_utils.lp_bug_generator')
- def test_log_additional_module(self, lp_bug_generator):
- # bug linked to another project should not appear
- repo = {
- 'module': 'sahara'
- }
- modified_since = 1234567890
- lp_bug_generator.return_value = iter([BUG, LINKED_BUG])
-
- expected = [{
- 'assignee': 'slukjanov',
- 'date_created': 1433252154,
- 'date_fix_committed': 1433266265,
- 'date_fix_released': 1433266265,
- 'id': 'sahara/1458945',
- 'importance': 'Medium',
- 'module': 'sahara',
- 'owner': 'samueldmq',
- 'status': 'Fix Released',
- 'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
- 'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945'
- }]
-
- actual = list(bps.log(repo, modified_since))
-
- self.assertEqual(expected, actual)
-
- @mock.patch('stackalytics.processor.launchpad_utils.lp_bug_generator')
- def test_log_additional_milestone(self, lp_bug_generator):
- # bug linked to different milestone should be mapped to the release
- repo = {
- 'module': 'sahara'
- }
- modified_since = 1234567890
- lp_bug_generator.return_value = iter([BUG, ANOTHER_MILESTONE_BUG])
-
- expected = [{
- 'assignee': 'slukjanov',
- 'date_created': 1433252154,
- 'date_fix_committed': 1433266265,
- 'date_fix_released': 1433266265,
- 'id': 'sahara/1458945',
- 'importance': 'Medium',
- 'module': 'sahara',
- 'owner': 'samueldmq',
- 'status': 'Fix Released',
- 'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
- 'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945'
- }, {
- 'assignee': 'slukjanov',
- 'date_created': 1433252154,
- 'date_fix_committed': 1433266265,
- 'date_fix_released': 1433266265,
- 'id': 'sahara/kilo/1458945',
- 'importance': 'Medium',
- 'module': 'sahara',
- 'release': 'kilo',
- 'owner': 'samueldmq',
- 'status': 'Fix Released',
- 'title': 'Bug #1458945 in Sahara Kilo: '
- '"Use graduated oslo.policy"',
- 'web_link': 'https://bugs.launchpad.net/sahara/kilo/+bug/1458945'
-
- }]
-
- actual = list(bps.log(repo, modified_since))
-
- self.assertEqual(expected, actual)
-
- @mock.patch('stackalytics.processor.launchpad_utils.lp_module_exists')
- @mock.patch('stackalytics.processor.launchpad_utils.lp_bug_generator')
- def test_log_module_alias(self, lp_bug_generator, lp_module_exists):
- # bug retrieved via a module alias should be reported under the
- # primary module name
- repo = {
- 'module': 'savanna',
- 'aliases': ['sahara']
- }
- modified_since = 1234567890
- lp_bug_generator.return_value = iter([BUG])
- lp_module_exists.side_effect = iter([False, True])
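- # side_effect order: lp_module_exists('savanna') -> False,
- # then the alias 'sahara' -> True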
-
- expected = [{
- 'assignee': 'slukjanov',
- 'date_created': 1433252154,
- 'date_fix_committed': 1433266265,
- 'date_fix_released': 1433266265,
- 'id': 'savanna/1458945',
- 'importance': 'Medium',
- 'module': 'savanna', # should be the same as primary module name
- 'owner': 'samueldmq',
- 'status': 'Fix Released',
- 'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
- 'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945'
- }]
-
- actual = list(bps.log(repo, modified_since))
-
- self.assertEqual(expected, actual)
diff --git a/stackalytics/tests/unit/test_config_files.py b/stackalytics/tests/unit/test_config_files.py
deleted file mode 100644
index d1331f17c..000000000
--- a/stackalytics/tests/unit/test_config_files.py
+++ /dev/null
@@ -1,256 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import json
-import os
-import stat
-
-import jsonschema
-import six
-import testtools
-
-from stackalytics.processor import normalizer
-from stackalytics.processor import schema as coded_schema
-
-
-IGNORED_COMPANIES = ['*robots', 'April', 'Chelsio Communications',
- 'CloudRunner.io', 'Datera', 'Facebook',
- 'Fermi National Accelerator Laboratory', 'Github',
- 'H3C',
- 'Huaxin Hospital, First Hospital of Tsinghua University',
- 'InfluxDB', 'Kickstarter', 'National Security Agency',
- 'OpenStack Foundation', 'OpenStack Korea User Group',
- 'ProphetStor', 'SVA System Vertrieb Alexander GmbH',
- 'Sencha', 'Stark & Wayne LLC', 'Styra',
- 'Suranee University of Technology',
- 'The Linux Foundation', 'UTi Worldwide', 'Undead Labs',
- 'Violin Memory', 'docCloud', 'npm']
-
-
-def dict_raise_on_duplicates(ordered_pairs):
- """Reject duplicate keys."""
- d = {}
- for k, v in ordered_pairs:
- if k in d:
- raise ValueError("duplicate key: %s (value: %s)" % (k, v))
- else:
- d[k] = v
- return d
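-# Usage sketch (hedged; mirrors how _verify_default_data_duplicate_keys below
-# feeds this hook into json.loads):
-#   json.loads('{"a": 1, "a": 2}',
-#              object_pairs_hook=dict_raise_on_duplicates)
-#   -> ValueError: duplicate key: a (value: 2)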
-
-
-class TestConfigFiles(testtools.TestCase):
-
- def _read_raw_file(self, file_name):
- if six.PY3:
- opener = functools.partial(open, encoding='utf8')
- else:
- opener = open
- with opener(file_name, 'r') as content_file:
- return content_file.read()
-
- def _read_file(self, file_name):
- return json.loads(self._read_raw_file(file_name))
-
- def _verify_ordering(self, array, key, msg):
- comparator = lambda x, y: (x > y) - (x < y)
-
- diff_msg = ''
- for i in range(len(array) - 1):
- if comparator(key(array[i]), key(array[i + 1])) > 0:
- diff_msg = ('Order fails at index %(index)s, '
- 'elements:\n%(first)s:\n%(second)s' %
- {'index': i, 'first': array[i],
- 'second': array[i + 1]})
- break
- if diff_msg:
- self.fail(msg + '\n' + diff_msg)
-
- def test_corrections(self):
- corrections = self._read_file('etc/corrections.json')
- schema = self._read_file('etc/corrections.schema.json')
- jsonschema.validate(corrections, schema)
-
- def _verify_default_data_duplicate_keys(self, file_name):
- try:
- json.loads(self._read_raw_file(file_name),
- object_pairs_hook=dict_raise_on_duplicates)
- except ValueError as ve:
- self.fail(ve)
-
- def test_default_data_duplicate_keys(self):
- self._verify_default_data_duplicate_keys('etc/default_data.json')
-
- def test_test_default_data_duplicate_keys(self):
- self._verify_default_data_duplicate_keys('etc/test_default_data.json')
-
- def _verify_default_data_by_schema(self, file_name):
- default_data = self._read_file(file_name)
- try:
- jsonschema.validate(default_data, coded_schema.default_data)
- except jsonschema.ValidationError as e:
- self.fail(e)
-
- def test_default_data_schema_conformance(self):
- self._verify_default_data_by_schema('etc/default_data.json')
-
- def test_test_default_data_schema_conformance(self):
- self._verify_default_data_by_schema('etc/test_default_data.json')
-
- def _verify_companies_in_alphabetical_order(self, file_name):
- companies = self._read_file(file_name)['companies']
- self._verify_ordering(
- companies, key=lambda x: x['domains'][0],
- msg='List of companies should be ordered by the first domain')
-
- def test_companies_in_alphabetical_order(self):
- self._verify_companies_in_alphabetical_order('etc/default_data.json')
-
- def test_companies_in_alphabetical_order_in_test_file(self):
- self._verify_companies_in_alphabetical_order(
- 'etc/test_default_data.json')
-
- def _verify_users_in_alphabetical_order(self, file_name):
- users = self._read_file(file_name)['users']
- self._verify_ordering(
- users, key=lambda x: (x.get('launchpad_id') or x.get('github_id')),
- msg='List of users should be ordered by launchpad id '
- 'or github id')
-
- def test_users_in_alphabetical_order(self):
- self._verify_users_in_alphabetical_order('etc/default_data.json')
-
- def test_users_in_alphabetical_order_in_test_file(self):
- self._verify_users_in_alphabetical_order('etc/test_default_data.json')
-
- def _check_collision(self, storage, user, field, field_name):
- self.assertNotIn(
- field, storage,
- 'Duplicate %s %s, collision between: %s and %s'
- % (field_name, field, storage[field], user))
- storage[field] = user
-
- def _verify_users_unique(self, file_name):
- users = self._read_file(file_name)['users']
- storage = {}
- for user in users:
- if user.get('launchpad_id'):
- field = user['launchpad_id']
- self.assertNotIn(
- field, storage,
- 'Duplicate launchpad_id %s, collision between: %s and %s'
- % (field, storage.get(field), user))
- storage[field] = user
-
- if user.get('gerrit_id'):
- field = user['gerrit_id']
- self.assertNotIn(
- ('gerrit:%s' % field), storage,
- 'Duplicate gerrit_id %s, collision between: %s and %s'
- % (field, storage.get('gerrit:%s' % field), user))
- storage['gerrit:%s' % field] = user
-
- for email in user['emails']:
- self.assertNotIn(
- email, storage,
- 'Duplicate email %s, collision between: %s and %s'
- % (email, storage.get(email), user))
- storage[email] = user
-
- def test_users_unique_profiles(self):
- self._verify_users_unique('etc/default_data.json')
-
- def test_users_unique_profiles_in_test_file(self):
- self._verify_users_unique('etc/test_default_data.json')
-
- def _verify_default_data_whitespace_issues(self, file_name):
- data = self._read_raw_file(file_name)
- line_n = 1
- for line in data.split('\n'):
- msg = 'Whitespace issue in "%s", line %s: ' % (line, line_n)
- self.assertEqual(-1, line.find('\t'),
- message=msg + 'tab character')
- self.assertEqual(line.rstrip(), line,
- message=msg + 'trailing spaces')
- line_n += 1
-
- def test_default_data_whitespace_issues(self):
- self._verify_default_data_whitespace_issues('etc/default_data.json')
-
- def test_test_default_data_whitespace_issues(self):
- self._verify_default_data_whitespace_issues(
- 'etc/test_default_data.json')
-
- def _validate_default_data_correctness(self, file_name):
- data = self._read_file(file_name)
- normalizer.normalize_default_data(data)
-
- def test_default_data_user_profiles_correctness(self):
- self._validate_default_data_correctness('etc/default_data.json')
-
- def test_test_default_data_user_profiles_correctness(self):
- self._validate_default_data_correctness('etc/test_default_data.json')
-
- def _validate_user_companies(self, file_name):
- data = self._read_file(file_name)
- users = data['users']
- companies = data['companies']
- company_names = []
- for company in companies:
- company_names.append(company['company_name'])
- for alias in company.get('aliases', []):
- company_names.append(alias)
-
- for user in users:
- for company in user['companies']:
- if not company['company_name'] in IGNORED_COMPANIES:
- error_msg = ('Company "%s" is unknown. Please add it to'
- ' the list of companies in the '
- 'default_data.json file' % company['company_name'])
- self.assertIn(company['company_name'], company_names,
- error_msg)
-
- def test_default_data_user_companies(self):
- self._validate_user_companies('etc/default_data.json')
-
- def test_test_default_data_user_companies(self):
- self._validate_user_companies('etc/test_default_data.json')
-
- def test_file_mode(self):
- files = os.listdir('etc')
- for f in ('etc/%s' % f for f in files):
- st = os.stat(f)
- x_flag = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
- self.assertFalse(bool(st.st_mode & x_flag),
- msg='File %s should not be executable' % f)
-
- def _verify_users_one_open_interval(self, file_name):
- users = self._read_file(file_name)['users']
- for user in users:
- ops = set([])
- for c in user['companies']:
- if not c['end_date']:
- ops.add(c['company_name'])
-
- self.assertLessEqual(
- len(ops), 1, msg='More than 1 company is specified '
- 'as current: %s. Please keep '
- 'only one' % ', '.join(ops))
-
- def test_default_data_users_one_open_interval(self):
- self._verify_users_one_open_interval('etc/default_data.json')
-
- def test_test_default_data_users_one_open_interval(self):
- self._verify_users_one_open_interval('etc/test_default_data.json')
diff --git a/stackalytics/tests/unit/test_data.py b/stackalytics/tests/unit/test_data.py
deleted file mode 100644
index 355e15da9..000000000
--- a/stackalytics/tests/unit/test_data.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-DEFAULT_DATA = {
- 'users': [
- {
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'jdoe@nec.com'],
- 'companies': [
- {'company_name': '*independent', 'end_date': '2013-May-01'},
- {'company_name': 'NEC', 'end_date': None},
- ]
- },
- {
- 'launchpad_id': 'smith',
- 'user_name': 'Smith',
- 'emails': ['smith@gmail.com', 'smith@nec.com'],
- 'companies': [
- {'company_name': 'IBM', 'end_date': '2013-May-01'},
- {'company_name': 'NEC', 'end_date': '2014-Jun-01'}
- ]
- },
- {
- 'launchpad_id': 'ivan_ivanov',
- 'user_name': 'Ivan Ivanov',
- 'emails': ['ivanivan@yandex.ru', 'iivanov@mirantis.com'],
- 'companies': [
- {'company_name': 'Mirantis', 'end_date': None},
- ]
- }
- ],
- 'companies': [
- {
- 'company_name': '*independent',
- 'domains': ['']
- },
- {
- 'company_name': 'NEC',
- 'domains': ['nec.com', 'nec.co.jp']
- },
- {
- 'company_name': 'Mirantis',
- 'domains': ['mirantis.com', 'mirantis.ru']
- },
- ],
- 'repos': [
- {
- 'branches': ['master'],
- 'module': 'stackalytics',
- 'project_type': 'stackforge',
- 'uri': 'git://git.openstack.org/stackforge/stackalytics.git'
- }
- ],
- 'releases': [
- {
- 'release_name': 'prehistory',
- 'end_date': '2011-Apr-21'
- },
- {
- 'release_name': 'Havana',
- 'end_date': '2013-Oct-17'
- }
- ]
-}
-
-USERS = DEFAULT_DATA['users']
-REPOS = DEFAULT_DATA['repos']
-COMPANIES = DEFAULT_DATA['companies']
-RELEASES = DEFAULT_DATA['releases']
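As a hedged sketch of how this fixture behaves once normalized (the
expectations mirror test_normalizer in test_default_data_processor.py below):

    import copy
    from stackalytics.processor import normalizer

    data = copy.deepcopy(DEFAULT_DATA)
    normalizer.normalize_default_data(data)
    assert data['users'][0]['user_id'] == 'john_doe'  # derived from launchpad_id
    assert data['users'][0]['companies'][-1]['end_date'] == 0  # open-ended entry
    assert data['repos'][0]['releases'] == []  # a releases list is added per repo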
diff --git a/stackalytics/tests/unit/test_default_data_processor.py b/stackalytics/tests/unit/test_default_data_processor.py
deleted file mode 100644
index 498efe9a0..000000000
--- a/stackalytics/tests/unit/test_default_data_processor.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-import mock
-import testtools
-
-from stackalytics.processor import default_data_processor
-from stackalytics.processor import normalizer
-from stackalytics.tests.unit import test_data
-
-
-class TestDefaultDataProcessor(testtools.TestCase):
- def setUp(self):
- super(TestDefaultDataProcessor, self).setUp()
-
- self.get_users = mock.Mock(return_value=[
- test_data.USERS,
- ])
-
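- # normalizing the shared fixture acts as a smoke check here;
- # the result itself is unused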
- normalized_data = copy.deepcopy(test_data.DEFAULT_DATA)
- normalizer.normalize_default_data(normalized_data)
-
- def tearDown(self):
- super(TestDefaultDataProcessor, self).tearDown()
-
- def test_normalizer(self):
- data = copy.deepcopy(test_data.DEFAULT_DATA)
-
- normalizer.normalize_default_data(data)
-
- self.assertIn('releases', data['repos'][0])
- self.assertEqual([], data['repos'][0]['releases'],
- message='Empty list of releases expected')
- self.assertEqual(0, data['users'][0]['companies'][-1]['end_date'],
- message='The last company end date should be 0')
- self.assertIn('user_id', data['users'][0])
- self.assertEqual(test_data.USERS[0]['launchpad_id'],
- data['users'][0]['user_id'],
- message='User id should be set')
-
- # verify that *independent company is added automatically
- self.assertEqual(3, len(data['users'][1]['companies']))
- self.assertEqual(0, data['users'][1]['companies'][-1]['end_date'],
- message='The last company end date should be 0')
-
- def test_update_project_list(self):
- with mock.patch('stackalytics.processor.default_data_processor.'
- '_retrieve_project_list_from_gerrit') as retriever:
- retriever.return_value = [
- {'module': 'nova',
- 'uri': 'git://git.openstack.org/openstack/nova',
- 'organization': 'openstack'},
- {'module': 'qa', 'uri': 'git://git.openstack.org/openstack/qa',
- 'has_gerrit': True,
- 'organization': 'openstack'},
- {'module': 'deb-nova',
- 'uri': 'git://git.openstack.org/openstack/deb-nova',
- 'organization': 'openstack'},
- ]
- dd = {
- 'repos': [
- {'module': 'qa',
- 'uri': 'git://git.openstack.org/openstack/qa',
- 'organization': 'openstack'},
- {'module': 'tux',
- 'uri': 'git://git.openstack.org/stackforge/tux',
- 'organization': 'stackforge'},
- ],
- 'project_sources': [{'organization': 'openstack',
- 'uri': 'gerrit://'}],
- 'module_groups': [],
- }
-
- default_data_processor._update_project_list(dd)
-
- self.assertEqual(3, len(dd['repos']))
- self.assertIn('qa', set([r['module'] for r in dd['repos']]))
- self.assertIn('nova', set([r['module'] for r in dd['repos']]))
- self.assertNotIn('deb-nova',
- set([r['module'] for r in dd['repos']]))
- self.assertIn('tux', set([r['module'] for r in dd['repos']]))
-
- self.assertIn('has_gerrit', dd['repos'][0])
- self.assertNotIn('has_gerrit', dd['repos'][1])
- self.assertNotIn('has_gerrit', dd['repos'][2])
-
- self.assertEqual(2, len(dd['module_groups']))
- self.assertIn({'id': 'openstack',
- 'module_group_name': 'openstack',
- 'modules': ['qa', 'nova'],
- 'tag': 'organization'}, dd['module_groups'])
- self.assertIn({'id': 'stackforge',
- 'module_group_name': 'stackforge',
- 'modules': ['tux'],
- 'tag': 'organization'}, dd['module_groups'])
-
- def test_update_project_list_ext_project_source(self):
- with mock.patch('stackalytics.processor.default_data_processor.'
- '_retrieve_project_list_from_github') as retriever:
- retriever.return_value = [
- {'module': 'kubernetes',
- 'uri': 'git://github.com/kubernetes/kubernetes',
- 'organization': 'kubernetes'},
- ]
- dd = {
- 'repos': [],
- 'project_sources': [
- {'organization': 'kubernetes',
- 'uri': 'github://',
- 'module_group_id': 'kubernetes-group'},
- ],
- 'module_groups': [],
- }
-
- default_data_processor._update_project_list(dd)
-
- self.assertEqual(1, len(dd['repos']))
- self.assertIn('kubernetes',
- set([r['module'] for r in dd['repos']]))
-
- self.assertEqual(1, len(dd['module_groups']))
- self.assertIn({'id': 'kubernetes-group',
- 'module_group_name': 'kubernetes',
- 'modules': ['kubernetes'],
- 'tag': 'organization'}, dd['module_groups'])
diff --git a/stackalytics/tests/unit/test_dump.py b/stackalytics/tests/unit/test_dump.py
deleted file mode 100644
index 0741a2c7f..000000000
--- a/stackalytics/tests/unit/test_dump.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import memcache
-import mock
-import testtools
-
-from stackalytics.processor import dump
-
-
-class TestDump(testtools.TestCase):
-
- def _make_data(self, record_count):
- data = {'record:count': record_count}
- for i in range(record_count):
- data['record:%d' % i] = i
- return data
-
- def test_export_data_records(self):
- record_count = 153
- data = self._make_data(record_count)
- memcache_inst = mock.Mock(memcache.Client)
- memcache_inst.get = lambda x: data.get(x)
- memcache_inst.get_multi = lambda keys, key_prefix: dict(
- ('%s' % n, data.get(key_prefix + '%s' % n)) for n in keys)
-
- with mock.patch('pickle.dump') as pickle_dump:
- fd = mock.Mock()
- dump.export_data(memcache_inst, fd)
-
- expected_calls = [mock.call(('record:count', record_count), fd)]
- for i in range(record_count):
- expected_calls.append(mock.call(('record:%d' % i,
- data['record:%d' % i]), fd))
- pickle_dump.assert_has_calls(expected_calls, any_order=True)
-
- def test_export_data_records_get_multi_truncates_chunk(self):
- record_count = 153
- data = self._make_data(record_count)
- memcache_inst = mock.Mock(memcache.Client)
- memcache_inst.get = lambda x: data.get(x)
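- # get_multi returns all but the last requested key, simulating a
- # truncated memcache reply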
- memcache_inst.get_multi = lambda keys, key_prefix: dict(
- ('%s' % n, data.get(key_prefix + '%s' % n))
- for n in [k for k, v in zip(keys, range(len(keys) - 1))])
-
- with mock.patch('pickle.dump') as pickle_dump:
- fd = mock.Mock()
- dump.export_data(memcache_inst, fd)
-
- expected_calls = [mock.call(('record:count', record_count), fd)]
- for i in range(record_count):
- expected_calls.append(mock.call(('record:%d' % i,
- data['record:%d' % i]), fd))
- pickle_dump.assert_has_calls(expected_calls, any_order=True)
diff --git a/stackalytics/tests/unit/test_governance.py b/stackalytics/tests/unit/test_governance.py
deleted file mode 100644
index 617b79944..000000000
--- a/stackalytics/tests/unit/test_governance.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.processor import governance
-
-
-SAMPLE = """
-Sahara:
- ptl: Sergey Lukjanov (SergeyLukjanov)
- irc-channel: openstack-sahara
- service: Data processing service
- mission: >
- To provide a scalable data processing stack and associated management
- interfaces.
- url: https://wiki.openstack.org/wiki/Sahara
- deliverables:
- python-saharaclient:
- repos:
- - openstack/python-saharaclient
- tags:
- - release:cycle-with-intermediary
- - release:has-stable-branches
- - type:library
- - release:managed
- - vulnerability:managed
- sahara:
- repos:
- - openstack/sahara
- - openstack/sahara-extra
- - openstack/sahara-image-elements
- tags:
- - tc:approved-release
- - release:managed
- - release:cycle-with-milestones
- - release:has-stable-branches
- - type:service
- - vulnerability:managed
- sahara-dashboard:
- repos:
- - openstack/sahara-dashboard
- tags:
- - type:library
- sahara-specs:
- repos:
- - openstack/sahara-specs
-"""
-
-
-class TestGovernance(testtools.TestCase):
-
- @mock.patch('stackalytics.processor.utils.read_uri')
- def test_process_official_list(self, read_uri):
- read_uri.return_value = SAMPLE
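- # the deliverable tags in SAMPLE should expand into project_type
- # module groups keyed by the single 'liberty' release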
-
- expected = {
- 'sahara-group': {
- 'id': 'sahara-group',
- 'module_group_name': 'Sahara Official',
- 'modules': {'python-saharaclient', 'sahara',
- 'sahara-dashboard', 'sahara-extra',
- 'sahara-image-elements', 'sahara-specs'},
- 'tag': 'program'
- },
- 'tc:approved-release': {
- 'id': 'tc:approved-release',
- 'module_group_name': 'tc:approved-release',
- 'modules': set(),
- 'releases': {
- 'liberty': {'sahara', 'sahara-extra',
- 'sahara-image-elements'},
- },
- 'tag': 'project_type'
- },
- 'type:library': {
- 'id': 'type:library',
- 'module_group_name': 'type:library',
- 'modules': set(),
- 'releases': {
- 'liberty': {'python-saharaclient', 'sahara-dashboard'},
- },
- 'tag': 'project_type'
- },
- 'type:service': {
- 'id': 'type:service',
- 'module_group_name': 'type:service',
- 'modules': set(),
- 'releases': {
- 'liberty': {'sahara', 'sahara-extra',
- 'sahara-image-elements'},
- },
- 'tag': 'project_type'
- },
- 'openstack-official': {
- 'id': 'openstack-official',
- 'module_group_name': 'openstack-official',
- 'modules': set(),
- 'releases': {
- 'liberty': {'python-saharaclient', 'sahara',
- 'sahara-dashboard', 'sahara-extra',
- 'sahara-image-elements', 'sahara-specs'},
- },
- 'tag': 'project_type'
- },
- 'openstack-others': {
- 'id': 'openstack-others',
- 'module_group_name': 'openstack-others',
- 'modules': set(),
- 'releases': {},
- 'tag': 'project_type'
- }
- }
-
- releases = [{
- 'release_name': 'Liberty',
- 'refs': {'governance': {'type': 'big_tent', 'source': 'uri'}}
- }]
-
- actual = governance.process_official_list(releases)
-
- self.assertEqual(expected, actual)
diff --git a/stackalytics/tests/unit/test_helpers.py b/stackalytics/tests/unit/test_helpers.py
deleted file mode 100644
index 6ac6daa10..000000000
--- a/stackalytics/tests/unit/test_helpers.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright (c) 2015 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.dashboard import helpers
-
-
-class TestHelpers(testtools.TestCase):
-
- @mock.patch('time.time')
- def test_get_current_company(self, mock_time_time):
- current_timestamp = 1234567890
- mock_time_time.return_value = current_timestamp
-
- user = {
- 'user_id': 'smith',
- 'user_name': 'John Smith',
- 'companies': [{
- 'company_name': 'Current',
- 'end_date': current_timestamp + 1
- }, {
- 'company_name': 'TheCompany',
- 'end_date': 0
- }]
- }
-
- self.assertEqual('Current', helpers.get_current_company(user))
-
- @mock.patch('stackalytics.dashboard.helpers.make_link')
- def test_extend_user(self, mock_make_link):
- company_link = mock.Mock()
- mock_make_link.return_value = company_link
-
- user = {
- 'user_id': 'smith',
- 'user_name': 'John Smith',
- 'companies': [{
- 'company_name': 'TheCompany',
- 'end_date': 0
- }]
- }
-
- expected = {
- 'user_id': 'smith',
- 'user_name': 'John Smith',
- 'companies': [{
- 'company_name': 'TheCompany',
- 'end_date': 0
- }],
- 'id': 'smith',
- 'company_link': company_link,
- 'text': 'John Smith',
- }
-
- observed = helpers.extend_user(user)
- self.assertEqual(expected, observed)
- mock_make_link.assert_called_once_with('TheCompany', '/', mock.ANY)
-
- @mock.patch('time.time')
- @mock.patch('stackalytics.dashboard.helpers.make_link')
- def test_extend_user_current_company(self, mock_make_link, mock_time_time):
- company_link = mock.Mock()
- mock_make_link.return_value = company_link
- current_timestamp = 1234567890
- mock_time_time.return_value = current_timestamp
-
- user = {
- 'user_id': 'smith',
- 'user_name': 'John Smith',
- 'companies': [{
- 'company_name': 'Current',
- 'end_date': current_timestamp + 1
- }, {
- 'company_name': 'TheCompany',
- 'end_date': 0
- }]
- }
-
- helpers.extend_user(user)
-
- mock_make_link.assert_called_once_with('Current', '/', mock.ANY)
diff --git a/stackalytics/tests/unit/test_launchpad_utils.py b/stackalytics/tests/unit/test_launchpad_utils.py
deleted file mode 100644
index 0dd62d9b7..000000000
--- a/stackalytics/tests/unit/test_launchpad_utils.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.processor import launchpad_utils
-
-
-class TestLaunchpadUtils(testtools.TestCase):
-
- @mock.patch('stackalytics.processor.launchpad_utils._lp_profile_by_email')
- def test_get_lp_info(self, lp_mock):
- lp_mock.return_value = dict(name='john', display_name='smith')
-
- observed = launchpad_utils.query_lp_info('john@smith.to')
-
- self.assertEqual(('john', 'smith'), observed)
- lp_mock.assert_called_once_with('john@smith.to')
-
- @mock.patch('stackalytics.processor.launchpad_utils._lp_profile_by_email')
- def test_get_lp_info_not_found(self, lp_mock):
- lp_mock.return_value = None
-
- observed = launchpad_utils.query_lp_info('john@smith.to')
-
- self.assertEqual((None, None), observed)
- lp_mock.assert_called_once_with('john@smith.to')
-
- @mock.patch('stackalytics.processor.launchpad_utils._lp_profile_by_email')
- def test_get_lp_info_invalid_email(self, lp_mock):
-
- observed = launchpad_utils.query_lp_info('error.root')
-
- self.assertEqual((None, None), observed)
- lp_mock.assert_not_called()
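
The three tests above pin down the query_lp_info() contract: invalid addresses never reach Launchpad, and a missing profile maps to (None, None). A hedged sketch follows; the regex validity check is an assumption (the module's actual check is not shown here), and profile_by_email stands in for the patched _lp_profile_by_email.

import re

def query_lp_info(email, profile_by_email):
    """Return (login, display name) for an email, or (None, None)."""
    if not re.match(r'^[^@\s]+@[^@\s]+\.[^@\s]+$', email):
        return None, None  # invalid emails never reach Launchpad
    profile = profile_by_email(email)  # stand-in for _lp_profile_by_email
    if profile is None:
        return None, None  # Launchpad does not know this address
    return profile['name'], profile['display_name']
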
diff --git a/stackalytics/tests/unit/test_mls.py b/stackalytics/tests/unit/test_mls.py
deleted file mode 100644
index 3e6a2d49d..000000000
--- a/stackalytics/tests/unit/test_mls.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-import mock
-import testtools
-
-from stackalytics.processor import mls
-
-
-EMAIL_CONTENT = '''
-From sorlando at nicira.com Tue Jul 17 07:30:43 2012
-From: sorlando at nicira.com (Salvatore Orlando)
-Date: Tue, 17 Jul 2012 00:30:43 -0700
-Subject: [openstack-dev] [nova] [pci device passthrough] fails with
- "NameError: global name '_' is not defined"
-In-Reply-To: <5004FBF1.1080102@redhat.com>
-References: <5004FBF1.1080102@redhat.com>
-Message-ID:
-
-Good morning Gary!
------------------
-
-test works :)
-
-> Reply
-'''
-
-
-class TestMls(testtools.TestCase):
-
- def test_mail_parse_regex(self):
-
- content = '''
-URL:
-
-From sorlando at nicira.com Tue Jul 17 07:30:43 2012
-From: sorlando at nicira.com (Salvatore Orlando)
-Date: Tue, 17 Jul 2012 00:30:43 -0700
-Subject: [openstack-dev] [nova] [pci device passthrough] fails with
- "NameError: global name '_' is not defined"
-In-Reply-To: <5004FBF1.1080102@redhat.com>
-References: <5004FBF1.1080102@redhat.com>
-Message-ID:
-
-Good morning Gary!
-
-test works :)
-
-From sorlando at nicira.com Tue Jul 17 07:30:43 2012
-From: sorlando at nicira.com (Salvatore Orlando)
- '''
- match = re.search(mls.MAIL_BOX_PATTERN, content)
- self.assertTrue(match)
- self.assertEqual('sorlando at nicira.com', match.group(1))
- self.assertEqual('Salvatore Orlando', match.group(2))
- self.assertEqual('Tue, 17 Jul 2012 00:30:43 -0700', match.group(3))
- self.assertEqual('[openstack-dev] [nova] [pci device passthrough] '
- 'fails with\n "NameError: global name \'_\' is not '
- 'defined"', match.group(4))
- self.assertEqual('', match.group(5))
- self.assertEqual('Good morning Gary!\n\ntest works :)\n',
- match.group(6))
-
- @mock.patch('stackalytics.processor.utils.read_gzip_from_uri')
- @mock.patch('stackalytics.processor.mls._get_mail_archive_links')
- @mock.patch('stackalytics.processor.mls._uri_content_changed')
- def test_log(self, mock_uri_content_changed, mock_get_mail_archive_links,
- mock_read_gzip_from_uri):
- mock_uri_content_changed.return_value = True
- mock_get_mail_archive_links.return_value = ['link']
- mock_read_gzip_from_uri.return_value = EMAIL_CONTENT
- mock_rsi = mock.Mock()
-
- emails = list(mls.log('uri', mock_rsi))
-
- self.assertEqual(1, len(emails))
- self.assertEqual('Good morning Gary!\n\ntest works :)\n',
- emails[0]['body'])
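
mls.log() is exercised above by feeding one mailbox page through MAIL_BOX_PATTERN. The sketch below shows the extraction loop implied by the group assertions in test_mail_parse_regex; the ' at ' to '@' normalization is an assumption, not taken from the module.

import re

def iterate_emails(content, mail_box_pattern):
    """Yield one dict per message found in a mailman archive page."""
    for match in re.finditer(mail_box_pattern, content):
        yield {
            # archives obfuscate addresses as 'user at host'; assumption:
            # the processor maps them back to 'user@host'
            'author_email': match.group(1).replace(' at ', '@'),
            'author_name': match.group(2),
            'date': match.group(3),
            'subject': match.group(4),
            'message_id': match.group(5),
            'body': match.group(6),
        }
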
diff --git a/stackalytics/tests/unit/test_mps.py b/stackalytics/tests/unit/test_mps.py
deleted file mode 100644
index 54012a1fd..000000000
--- a/stackalytics/tests/unit/test_mps.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-import testtools
-
-from stackalytics.processor import mps
-
-
-class TestMps(testtools.TestCase):
-
- def test_member_parse_regex(self):
-
- content = '''Individual Member Profile
-
-
-
-
-
-
-
-
-Jim Battenberg
-
-
-Date Joined
-
-June 25, 2013
-
-Affiliations
-
-
- Rackspace From (Current)
-
-
-
-Statement of Interest
-
-
- contribute logic and evangelize openstack
-
-
- '''
-
- match = re.search(mps.NAME_AND_DATE_PATTERN, content)
- self.assertTrue(match)
- self.assertEqual('Jim Battenberg', match.group('member_name'))
- self.assertEqual('June 25, 2013 ', match.group('date_joined'))
-
- match = re.search(mps.COMPANY_PATTERN, content)
- self.assertTrue(match)
- self.assertEqual('Rackspace', match.group('company_draft'))
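
The mps patterns above are consumed through named groups (member_name, date_joined, company_draft). Here is a self-contained illustration of the same named-group extraction style; the pattern is a toy stand-in, not the real NAME_AND_DATE_PATTERN.

import re

pattern = (r'(?P<member_name>[A-Z][\w ]+)\n'
           r'.*?Date Joined\n\s*'
           r'(?P<date_joined>\w+ \d+, \d{4})')
text = 'Jim Battenberg\n\nDate Joined\n June 25, 2013'
match = re.search(pattern, text, re.S)  # re.S lets .*? cross blank lines
assert match.group('member_name') == 'Jim Battenberg'
assert match.group('date_joined') == 'June 25, 2013'
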
diff --git a/stackalytics/tests/unit/test_openstackid_utils.py b/stackalytics/tests/unit/test_openstackid_utils.py
deleted file mode 100644
index 5c7f32bc8..000000000
--- a/stackalytics/tests/unit/test_openstackid_utils.py
+++ /dev/null
@@ -1,355 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.processor import openstackid_utils as ou
-
-USER_PROFILE = {
- "total": 1,
- "data": [
- {
- "id": 5555,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": [
- {
- "start_date": 1193875200,
- "end_date": 1496188800,
- "organization": {
- "name": "Mirantis"
- }
- },
- {
- "start_date": 1496275200,
- "end_date": None,
- "organization": {
- "name": "Huawei"
- }
- }
- ]
- }
- ]
-}
-USER_PROFILE_NO_AFFILIATIONS = {
- "total": 1,
- "data": [
- {
- "id": 5555,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": []
- }
- ]
-}
-USER_PROFILE_NO_DATES = {
- "total": 1,
- "data": [
- {
- "id": 2222,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": [
- {
- "start_date": None,
- "end_date": None,
- "is_current": True,
- "organization": {
- "name": "Hewlett Packard Enterprise"
- }
- }
- ]
- }
- ]
-}
-USER_PROFILE_MULTIPLE_RECORDS = {
- "total": 1,
- "data": [
- {
- "id": 2222,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": [
- {
- "start_date": None,
- "end_date": None,
- "is_current": True,
- "organization": {
- "name": "Hewlett Packard Enterprise"
- }
- }
- ]
- },
- {
- "id": 5555,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": [
- {
- "start_date": 1456790400,
- "end_date": None,
- "organization": {
- "name": "Red Hat"
- }
- }
- ]
- }
- ]
-}
-USER_PROFILE_NO_CURRENT = {
- "total": 1,
- "data": [
- {
- "id": 5555,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": [
- {
- "start_date": 1193875200,
- "end_date": 1496188800,
- "organization": {
- "name": "Mirantis"
- }
- }
- ]
- }
- ]
-}
-USER_PROFILE_MULTIPLE_CURRENT = {
- "total": 1,
- "data": [
- {
- "id": 1111,
- "first_name": "John",
- "last_name": "Smith",
- "affiliations": [
- {
- "start_date": 1378339200,
- "end_date": None,
- "is_current": True,
- "organization": {
- "name": "NTT"
- }
- },
- {
- "start_date": 1442880000,
- "end_date": None,
- "is_current": True,
- "organization": {
- "name": "NTT DATA"
- }
- }
- ]
- }
- ]
-}
-
-
-class TestOpenStackIDUtils(testtools.TestCase):
-
- def test_iterate_intervals(self):
- origin = [ou.Interval(100, 200, 'a'),
- ou.Interval(200, 0, 'b')]
- expected = [ou.Interval(0, 100, None),
- ou.Interval(100, 200, 'a'),
- ou.Interval(200, 0, 'b')]
-
- observed = list(ou._iterate_intervals(origin, threshold=10))
- self.assertEqual(expected, observed)
-
- def test_iterate_intervals_2(self):
- origin = [ou.Interval(100, 200, 'a'),
- ou.Interval(300, 400, 'b')]
- expected = [ou.Interval(0, 100, None),
- ou.Interval(100, 200, 'a'),
- ou.Interval(200, 300, None),
- ou.Interval(300, 400, 'b'),
- ou.Interval(400, 0, None)]
-
- observed = list(ou._iterate_intervals(origin, threshold=10))
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email(self, reader_mock):
- reader_mock.return_value = USER_PROFILE
- email = 'dummy@dummy.org'
-
- expected = {
- 'openstack_id': 5555,
- 'user_name': 'John Smith',
- 'emails': [email],
- 'companies': [{
- 'company_name': '*independent',
- 'end_date': 1193875200
- }, {
- 'company_name': 'Mirantis',
- 'end_date': 1496188800
- }, {
- 'company_name': 'Huawei',
- 'end_date': 0
- }]
- }
-
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_not_affiliated(self, reader_mock):
- reader_mock.return_value = USER_PROFILE_NO_AFFILIATIONS
- email = 'dummy@dummy.org'
-
- expected = {
- 'openstack_id': 5555,
- 'user_name': 'John Smith',
- 'emails': [email],
- 'companies': [{
- 'company_name': '*independent',
- 'end_date': 0
- }]
- }
-
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_not_found(self, reader_mock):
- reader_mock.return_value = {
- "total": 0,
- "data": []
- }
- email = 'dummy@dummy.org'
-
- expected = None
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_not_read(self, reader_mock):
- reader_mock.return_value = None
- email = 'dummy@dummy.org'
-
- expected = None
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_no_dates(self, reader_mock):
- reader_mock.return_value = USER_PROFILE_NO_DATES
- email = 'dummy@dummy.org'
-
- expected = {
- 'openstack_id': 2222,
- 'user_name': 'John Smith',
- 'emails': [email],
- 'companies': [{
- 'company_name': 'Hewlett Packard Enterprise',
- 'end_date': 0
- }]
- }
-
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_multiple_records(self, reader_mock):
- reader_mock.return_value = USER_PROFILE_MULTIPLE_RECORDS
- email = 'dummy@dummy.org'
-
- expected = {
- 'openstack_id': 5555,
- 'user_name': 'John Smith',
- 'emails': [email],
- 'companies': [{
- 'company_name': '*independent',
- 'end_date': 1456790400
- }, {
- 'company_name': 'Red Hat',
- 'end_date': 0
- }]
- }
-
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_no_current(self, reader_mock):
- reader_mock.return_value = USER_PROFILE_NO_CURRENT
- email = 'dummy@dummy.org'
-
- expected = {
- 'openstack_id': 5555,
- 'user_name': 'John Smith',
- 'emails': [email],
- 'companies': [{
- 'company_name': '*independent',
- 'end_date': 1193875200
- }, {
- 'company_name': 'Mirantis',
- 'end_date': 1496188800
- }, {
- 'company_name': '*independent',
- 'end_date': 0
- }]
- }
-
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
-
- @mock.patch('stackalytics.processor.utils.read_json_from_uri')
- def test_user_profile_by_email_multiple_current(self, reader_mock):
- reader_mock.return_value = USER_PROFILE_MULTIPLE_CURRENT
- email = 'dummy@dummy.org'
-
- expected = {
- 'openstack_id': 1111,
- 'user_name': 'John Smith',
- 'emails': [email],
- 'companies': [{
- 'company_name': '*independent',
- 'end_date': 1378339200
- }, {
- 'company_name': 'NTT',
- 'end_date': 1442880000
- }, {
- 'company_name': 'NTT DATA',
- 'end_date': 0
- }]
- }
-
- observed = ou.user_profile_by_email(email)
-
- reader_mock.assert_called_once_with(
- ou.OSID_URI % email, session=ou._openstackid_session)
- self.assertEqual(expected, observed)
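
Taken together, the fixtures above describe one transformation: a list of dated affiliations becomes a companies list in which gaps are filled with '*independent' and end_date == 0 marks the open-ended tail. A condensed sketch of that mapping follows; the gap threshold is a guess, and the overlapping is_current case from USER_PROFILE_MULTIPLE_CURRENT (where an open affiliation is truncated by the next one) is deliberately left out. The real code goes through ou._iterate_intervals().

def affiliations_to_companies(affiliations, threshold=90 * 24 * 3600):
    """Map OpenStackID affiliations to a stackalytics companies list."""
    companies = []
    previous_end = 0
    for aff in affiliations:
        start = aff.get('start_date') or 0
        end = aff.get('end_date') or 0  # None means still employed
        if start and start - previous_end > threshold:
            # a long gap before this job counts as independent time
            companies.append({'company_name': '*independent',
                              'end_date': start})
        companies.append({'company_name': aff['organization']['name'],
                          'end_date': end})
        previous_end = end
    if not companies or companies[-1]['end_date']:
        # no affiliations at all, or the last one is closed: the user is
        # independent from then on
        companies.append({'company_name': '*independent', 'end_date': 0})
    return companies
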
diff --git a/stackalytics/tests/unit/test_rcs.py b/stackalytics/tests/unit/test_rcs.py
deleted file mode 100644
index 7a77f79d9..000000000
--- a/stackalytics/tests/unit/test_rcs.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright (c) 2015 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-
-import mock
-import testtools
-
-from stackalytics.processor import rcs
-
-REVIEW_ONE = json.dumps(
- {"project": "openstack/nova", "branch": "master", "topic": "bug/1494374",
- "id": "Id741dfc769c02a5544691a7db49a7dbff6b11376", "number": "229382",
- "subject": "method is nearly 400 LOC and should be broken up",
- "createdOn": 1443613948, "lastUpdated": 1444222222,
- "sortKey": "0038481b00038006", "open": True, "status": "NEW"})
-REVIEW_END_LINE = json.dumps(
- {"type": "stats", "rowCount": 2, "runTimeMilliseconds": 13})
-
-
-class TestRcs(testtools.TestCase):
-
- @mock.patch('paramiko.SSHClient')
- def test_setup(self, mock_client_cons):
- mock_client = mock.Mock()
- mock_client_cons.return_value = mock_client
-
- mock_connect = mock.Mock()
- mock_client.connect = mock_connect
-
- gerrit = rcs.Gerrit('gerrit://review.openstack.org')
- gerrit.setup(username='user', key_filename='key')
-
- mock_connect.assert_called_once_with(
- 'review.openstack.org', port=rcs.DEFAULT_PORT, key_filename='key',
- username='user')
-
- @mock.patch('paramiko.SSHClient')
- def test_setup_error(self, mock_client_cons):
- mock_client = mock.Mock()
- mock_client_cons.return_value = mock_client
-
- mock_connect = mock.Mock()
- mock_client.connect = mock_connect
- mock_connect.side_effect = Exception
-
- gerrit = rcs.Gerrit('gerrit://review.openstack.org')
- self.assertRaises(rcs.RcsException, gerrit.setup,
- username='user', key_filename='key')
-
- mock_connect.assert_called_once_with(
- 'review.openstack.org', port=rcs.DEFAULT_PORT, key_filename='key',
- username='user')
-
- @mock.patch('paramiko.SSHClient')
- @mock.patch('time.time')
- def test_log(self, mock_time, mock_client_cons):
- mock_client = mock.Mock()
- mock_client_cons.return_value = mock_client
-
- mock_exec = mock.Mock()
- mock_client.exec_command = mock_exec
- mock_exec.side_effect = [
- ('', [REVIEW_ONE, REVIEW_END_LINE], ''), # one review and summary
- ('', [REVIEW_END_LINE], ''), # only summary = no more reviews
- ]
-
- gerrit = rcs.Gerrit('uri')
-
- repo = dict(organization='openstack', module='nova')
- branch = 'master'
- last_retrieval_time = 1444000000
- mock_time.return_value = 1444333333
- records = list(gerrit.log(repo, branch, last_retrieval_time))
-
- self.assertEqual(1, len(records))
- self.assertEqual('229382', records[0]['number'])
-
- mock_client.exec_command.assert_has_calls([
- mock.call('gerrit query --all-approvals --patch-sets '
- '--format JSON project:\'openstack/nova\' branch:master '
- 'limit:100 age:0s'),
- mock.call('gerrit query --all-approvals --patch-sets '
- '--format JSON project:\'openstack/nova\' branch:master '
- 'limit:100 age:111111s'),
- ])
-
- @mock.patch('paramiko.SSHClient')
- def test_log_old_reviews(self, mock_client_cons):
- mock_client = mock.Mock()
- mock_client_cons.return_value = mock_client
-
- mock_exec = mock.Mock()
- mock_client.exec_command = mock_exec
- mock_exec.side_effect = [
- ('', [REVIEW_ONE, REVIEW_END_LINE], ''), # one review and summary
- ('', [REVIEW_END_LINE], ''), # only summary = no more reviews
- ]
-
- gerrit = rcs.Gerrit('uri')
-
- repo = dict(organization='openstack', module='nova')
- branch = 'master'
- last_retrieval_time = 1445000000
- records = list(gerrit.log(repo, branch, last_retrieval_time,
- status='merged', grab_comments=True))
-
- self.assertEqual(0, len(records))
-
- mock_client.exec_command.assert_has_calls([
- mock.call('gerrit query --all-approvals --patch-sets '
- '--format JSON project:\'openstack/nova\' branch:master '
- 'limit:100 age:0s status:merged --comments'),
- ])
-
- @mock.patch('paramiko.SSHClient')
- @mock.patch('time.time')
- def test_log_error_tolerated(self, mock_time, mock_client_cons):
- mock_client = mock.Mock()
- mock_client_cons.return_value = mock_client
-
- mock_exec = mock.Mock()
- mock_client.exec_command = mock_exec
- mock_exec.side_effect = [
- Exception,
- ('', [REVIEW_ONE, REVIEW_END_LINE], ''), # one review and summary
- Exception,
- ('', [REVIEW_END_LINE], ''), # only summary = no more reviews
- ]
-
- gerrit = rcs.Gerrit('uri')
-
- repo = dict(organization='openstack', module='nova')
- branch = 'master'
- last_retrieval_time = 1444000000
- mock_time.return_value = 1444333333
- records = list(gerrit.log(repo, branch, last_retrieval_time))
-
- self.assertEqual(1, len(records))
- self.assertEqual('229382', records[0]['number'])
-
- mock_client.exec_command.assert_has_calls([
- mock.call('gerrit query --all-approvals --patch-sets '
- '--format JSON project:\'openstack/nova\' branch:master '
- 'limit:100 age:0s'),
- mock.call('gerrit query --all-approvals --patch-sets '
- '--format JSON project:\'openstack/nova\' branch:master '
- 'limit:100 age:111111s'),
- ])
-
- @mock.patch('paramiko.SSHClient')
- @mock.patch('time.time')
- def test_log_error_fatal(self, mock_time, mock_client_cons):
- mock_client = mock.Mock()
- mock_client_cons.return_value = mock_client
-
- mock_exec = mock.Mock()
- mock_client.exec_command = mock_exec
- mock_exec.side_effect = [Exception] * rcs.SSH_ERRORS_LIMIT
-
- gerrit = rcs.Gerrit('uri')
-
- repo = dict(organization='openstack', module='nova')
- branch = 'master'
- last_retrieval_time = 1444000000
- mock_time.return_value = 1444333333
-
- try:
- list(gerrit.log(repo, branch, last_retrieval_time))
- self.fail('Gerrit.log should raise RcsException, but it did not')
- except rcs.RcsException:
- pass
-
- mock_client.exec_command.assert_has_calls([
- mock.call('gerrit query --all-approvals --patch-sets '
- '--format JSON project:\'openstack/nova\' branch:master '
- 'limit:100 age:0s')] * rcs.SSH_ERRORS_LIMIT)
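
The Gerrit tests above fix the paging protocol: every query carries 'age:<seconds>s', the first page uses age:0s, and each following page derives its age from the lastUpdated of the oldest review already seen (1444333333 - 1444222222 = 111111 in test_log). Below is a sketch of that loop; run_query is a hypothetical stand-in for exec_command over SSH, and the retry logic bounded by rcs.SSH_ERRORS_LIMIT is omitted.

import json
import time

def page_reviews(run_query, base_query, last_retrieval_time):
    """Yield reviews newest-first, paging by the 'age' query parameter."""
    age = 0
    while True:
        lines = run_query('%s limit:100 age:%ds' % (base_query, age))
        reviews = [json.loads(line) for line in lines]
        data = [r for r in reviews if r.get('type') != 'stats']
        if not data:
            return  # only the stats summary came back: no more reviews
        for review in data:
            if review['lastUpdated'] < last_retrieval_time:
                return  # reached reviews we already processed
            yield review
        # next page: everything older than the oldest review seen so far
        age = int(time.time()) - data[-1]['lastUpdated']
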
diff --git a/stackalytics/tests/unit/test_record_processor.py b/stackalytics/tests/unit/test_record_processor.py
deleted file mode 100644
index 5e468c3ca..000000000
--- a/stackalytics/tests/unit/test_record_processor.py
+++ /dev/null
@@ -1,1746 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-import mock
-from oslo_config import cfg
-import six
-import testtools
-
-from stackalytics.processor import config
-from stackalytics.processor import record_processor
-from stackalytics.processor import runtime_storage
-from stackalytics.processor import user_processor
-from stackalytics.processor.user_processor import get_company_by_email
-from stackalytics.processor import utils
-
-
-CONF = cfg.CONF
-
-RELEASES = [
- {
- 'release_name': 'prehistory',
- 'end_date': utils.date_to_timestamp('2011-Apr-21')
- },
- {
- 'release_name': 'Diablo',
- 'end_date': utils.date_to_timestamp('2011-Sep-08')
- },
- {
- 'release_name': 'Zoo',
- 'end_date': utils.date_to_timestamp('2035-Sep-08')
- },
-]
-
-REPOS = [
- {
- "branches": ["master"],
- "module": "stackalytics",
- "project_type": "stackforge",
- "uri": "git://git.openstack.org/stackforge/stackalytics.git"
- }
-]
-
-
-class TestRecordProcessor(testtools.TestCase):
- def setUp(self):
- super(TestRecordProcessor, self).setUp()
- self.read_json_from_uri_patch = mock.patch(
- 'stackalytics.processor.utils.read_json_from_uri')
- self.read_launchpad = self.read_json_from_uri_patch.start()
- self.lp_profile_by_launchpad_id_patch = mock.patch(
- 'stackalytics.processor.launchpad_utils.'
- '_lp_profile_by_launchpad_id')
- self.lp_profile_by_launchpad_id = (
- self.lp_profile_by_launchpad_id_patch.start())
- self.lp_profile_by_launchpad_id.return_value = None
- self.lp_profile_by_email_patch = mock.patch(
- 'stackalytics.processor.launchpad_utils._lp_profile_by_email')
- self.lp_profile_by_email = (
- self.lp_profile_by_email_patch.start())
- self.lp_profile_by_email.return_value = None
- CONF.register_opts(config.CONNECTION_OPTS + config.PROCESSOR_OPTS)
-
- def tearDown(self):
- super(TestRecordProcessor, self).tearDown()
- self.read_json_from_uri_patch.stop()
- self.lp_profile_by_launchpad_id_patch.stop()
- self.lp_profile_by_email_patch.stop()
-
- # get_company_by_email
-
- def test_get_company_by_email_mapped(self):
- record_processor_inst = self.make_record_processor(
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}]
- )
- email = 'jdoe@ibm.com'
- res = get_company_by_email(record_processor_inst.domains_index, email)
- self.assertEqual('IBM', res)
-
- def test_get_company_by_email_with_long_suffix_mapped(self):
- record_processor_inst = self.make_record_processor(
- companies=[{'company_name': 'NEC', 'domains': ['nec.co.jp']}]
- )
- email = 'man@mxw.nes.nec.co.jp'
- res = get_company_by_email(record_processor_inst.domains_index, email)
- self.assertEqual('NEC', res)
-
- def test_get_company_by_email_with_long_suffix_mapped_2(self):
- record_processor_inst = self.make_record_processor(
- companies=[{'company_name': 'NEC',
- 'domains': ['nec.co.jp', 'nec.com']}]
- )
- email = 'man@mxw.nes.nec.com'
- res = get_company_by_email(record_processor_inst.domains_index, email)
- self.assertEqual('NEC', res)
-
- def test_get_company_by_email_not_mapped(self):
- record_processor_inst = self.make_record_processor()
- email = 'foo@boo.com'
- res = get_company_by_email(record_processor_inst.domains_index, email)
- self.assertIsNone(res)
-
- # commit processing
-
- def test_process_commit_existing_user(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': '*independent',
- 'end_date': 1234567890},
- {'company_name': 'NEC',
- 'end_date': 0},
- ]
- }
- ])
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@gmail.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_commit_existing_user_old_job(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': '*independent',
- 'end_date': 1234567890},
- {'company_name': 'NEC',
- 'end_date': 0},
- ]
- }
- ])
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@gmail.com',
- author_name='John Doe',
- date=1000000000)))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': '*independent',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_commit_existing_user_new_email_known_company(self):
- # The user is known to LP, but the email is new to us and maps to
- # another company. Should return that company instead of the ones
- # mentioned in the user profile.
- record_processor_inst = self.make_record_processor(
- users=[
- {'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@nec.co.jp'],
- 'companies': [{'company_name': 'NEC', 'end_date': 0}]}
- ],
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- lp_info={'johndoe@ibm.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@ibm.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@ibm.com',
- 'author_name': 'John Doe',
- 'company_name': 'IBM',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
- self.assertIn('johndoe@ibm.com', user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe')['emails'])
-
- def test_process_commit_existing_user_new_email_known_company_static(self):
- # The user profile is configured in default_data. The email is new
- # to us and maps to another company. We still use the company
- # specified in the profile.
- record_processor_inst = self.make_record_processor(
- users=[
- {'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'static': True,
- 'emails': ['johndoe@nec.co.jp'],
- 'companies': [{'company_name': 'NEC', 'end_date': 0}]}
- ],
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- lp_info={'johndoe@ibm.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@ibm.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@ibm.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
- self.assertIn('johndoe@ibm.com', user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe')['emails'])
-
- def test_process_commit_existing_user_old_job_not_overridden(self):
- # The user is known to LP, but the email is new to us and maps to
- # another company. There is already a record with the new email, but
- # from the period when the user worked for a different company. Should
- # return the company mentioned in the profile instead of overriding it.
- record_processor_inst = self.make_record_processor(
- users=[
- {'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@nec.co.jp'],
- 'companies': [{'company_name': 'IBM', 'end_date': 1200000000},
- {'company_name': 'NEC', 'end_date': 0}]}
- ],
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']},
- {'company_name': 'NEC', 'domains': ['nec.com']}],
- lp_info={'johndoe@nec.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@nec.com',
- author_name='John Doe',
- date=1000000000)))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@nec.com',
- 'author_name': 'John Doe',
- 'company_name': 'IBM',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_commit_existing_user_new_email_unknown_company(self):
- # The user is known to LP, but the email is new to us. Should match
- # the user and return the company from the user profile.
- record_processor_inst = self.make_record_processor(
- users=[
- {'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@nec.co.jp'],
- 'companies': [{'company_name': 'NEC', 'end_date': 0}]}
- ],
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- lp_info={'johndoe@gmail.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@gmail.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
- self.assertIn('johndoe@gmail.com', user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe')['emails'])
-
- def test_process_commit_existing_user_new_email_known_company_update(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com'],
- 'companies': [{'company_name': '*independent',
- 'end_date': 0}]}
- ],
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- lp_info={'johndoe@ibm.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@ibm.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@ibm.com',
- 'author_name': 'John Doe',
- 'company_name': 'IBM',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
- user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst, user_id='john_doe')
- self.assertIn('johndoe@gmail.com', user['emails'])
- self.assertEqual('IBM', user['companies'][0]['company_name'],
- message='User affiliation should be updated')
-
- def test_process_commit_new_user(self):
- # The user is known to LP, but new to us.
- # Should add a new user and set the company based on the email.
- record_processor_inst = self.make_record_processor(
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- lp_info={'johndoe@ibm.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@ibm.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@ibm.com',
- 'author_name': 'John Doe',
- 'company_name': 'IBM',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
- user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst, user_id='john_doe')
- self.assertIn('johndoe@ibm.com', user['emails'])
- self.assertEqual('IBM', user['companies'][0]['company_name'])
-
- def test_process_commit_new_user_unknown_to_lp(self):
- # The user is new to us and not known to LP.
- # Should set the user name and leave the LP id empty.
- record_processor_inst = self.make_record_processor(
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}])
-
- processed_commit = list(record_processor_inst.process(
- generate_commits(author_email='johndoe@ibm.com',
- author_name='John Doe')))[0]
-
- expected_commit = {
- 'launchpad_id': None,
- 'author_email': 'johndoe@ibm.com',
- 'author_name': 'John Doe',
- 'company_name': 'IBM',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
- user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='johndoe@ibm.com')
- self.assertIn('johndoe@ibm.com', user['emails'])
- self.assertEqual('IBM', user['companies'][0]['company_name'])
- self.assertIsNone(user['launchpad_id'])
-
- def test_process_review_new_user(self):
- # The user is known to LP, but new to us.
- # Should add a new user and set the company based on the email.
- record_processor_inst = self.make_record_processor(
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- lp_info={'johndoe@ibm.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_review = list(record_processor_inst.process([
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'johndoe@ibm.com',
- 'username': 'John_Doe'},
- 'createdOn': 1379404951,
- 'module': 'nova', 'branch': 'master'}
- ]))[0]
-
- expected_review = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@ibm.com',
- 'author_name': 'John Doe',
- 'company_name': 'IBM',
- }
-
- self.assertRecordsMatch(expected_review, processed_review)
- user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst, user_id='john_doe')
- self.assertEqual('John_Doe', user['gerrit_id'])
-
- def test_process_review_without_name(self):
- record_processor_inst = self.make_record_processor()
-
- records = list(record_processor_inst.process([
- {
- 'record_type': 'review',
- 'module': 'sandbox',
- "project": "openstack-dev/sandbox",
- "branch": "master",
- "id": "I8ecdd044c45e93589b42c3166167c30a3bd0ed5f",
- "number": "220784", "subject": "hello,i will commit",
- "owner": {"email": "1102012941@qq.com", "username": "yl"},
- "createdOn": 1441524597,
- "patchSets": [
- {
- "number": "1",
- "revision": "95f73967a869db6748b22c6562fbfc09c08ef910",
- "uploader": {
- "email": "foo@qq.com"},
- "createdOn": 1441524597,
- "author": {
- "email": "1102012941@qq.com"},
- "approvals": [
- {"type": "Code-Review",
- "value": "-1",
- "grantedOn": 1441524601,
- "by": {
- "email": "congressci@gmail.com",
- "username": "vmware-congress-ci"}}]}]}
- ]))
-
- expected_review = {
- 'user_id': 'yl',
- 'author_email': '1102012941@qq.com',
- 'author_name': 'yl',
- 'company_name': '*independent',
- }
-
- expected_patch = {
- 'user_id': 'foo@qq.com',
- 'author_email': 'foo@qq.com',
- 'author_name': 'Anonymous Coward',
- 'company_name': '*independent',
- }
-
- expected_mark = {
- 'user_id': 'vmware-congress-ci',
- 'author_email': 'congressci@gmail.com',
- 'author_name': 'vmware-congress-ci',
- 'company_name': '*independent',
- }
-
- self.assertRecordsMatch(expected_review, records[0])
- self.assertRecordsMatch(expected_patch, records[1])
- self.assertRecordsMatch(expected_mark, records[2])
-
- def generate_bugs(self, status='Confirmed', **kwargs):
- rec = {
- 'record_type': 'bug',
- 'id': 'bug_id',
- 'owner': 'owner',
- 'date_created': 1234567890,
- 'module': 'nova',
- 'status': status
- }
- rec.update(kwargs)
- yield rec
-
- def test_process_bug_not_fixed(self):
- record = self.generate_bugs()
- record_processor_inst = self.make_record_processor()
- bugs = list(record_processor_inst.process(record))
- self.assertEqual(len(bugs), 1)
- self.assertRecordsMatch({
- 'primary_key': 'bugf:bug_id',
- 'record_type': 'bugf',
- 'launchpad_id': 'owner',
- 'date': 1234567890,
- }, bugs[0])
-
- def test_process_bug_fix_committed(self):
- record = self.generate_bugs(status='Fix Committed',
- date_fix_committed=1234567891,
- assignee='assignee')
- record_processor_inst = self.make_record_processor()
- bugs = list(record_processor_inst.process(record))
- self.assertEqual(len(bugs), 2)
- self.assertRecordsMatch({
- 'primary_key': 'bugf:bug_id',
- 'record_type': 'bugf',
- 'launchpad_id': 'owner',
- 'date': 1234567890,
- }, bugs[0])
- self.assertRecordsMatch({
- 'primary_key': 'bugr:bug_id',
- 'record_type': 'bugr',
- 'launchpad_id': 'assignee',
- 'date': 1234567891,
- }, bugs[1])
-
- def test_process_bug_fix_released(self):
- record = self.generate_bugs(status='Fix Released',
- date_fix_committed=1234567891,
- date_fix_released=1234567892,
- assignee='assignee')
- record_processor_inst = self.make_record_processor()
- bugs = list(record_processor_inst.process(record))
- self.assertEqual(len(bugs), 2)
- self.assertRecordsMatch({
- 'primary_key': 'bugf:bug_id',
- 'record_type': 'bugf',
- 'launchpad_id': 'owner',
- 'date': 1234567890,
- }, bugs[0])
- self.assertRecordsMatch({
- 'primary_key': 'bugr:bug_id',
- 'record_type': 'bugr',
- 'launchpad_id': 'assignee',
- 'date': 1234567891,
- }, bugs[1])
-
- def test_process_bug_fix_released_without_committed(self):
- record = self.generate_bugs(status='Fix Released',
- date_fix_released=1234567892,
- assignee='assignee')
- record_processor_inst = self.make_record_processor()
- bugs = list(record_processor_inst.process(record))
- self.assertEqual(len(bugs), 2)
- self.assertRecordsMatch({
- 'primary_key': 'bugf:bug_id',
- 'record_type': 'bugf',
- 'launchpad_id': 'owner',
- 'date': 1234567890,
- }, bugs[0])
- self.assertRecordsMatch({
- 'primary_key': 'bugr:bug_id',
- 'record_type': 'bugr',
- 'launchpad_id': 'assignee',
- 'date': 1234567892,
- }, bugs[1])
-
- def test_process_bug_fix_committed_without_assignee(self):
- record = self.generate_bugs(status='Fix Committed',
- date_fix_committed=1234567891)
- record_processor_inst = self.make_record_processor()
- bugs = list(record_processor_inst.process(record))
- self.assertEqual(len(bugs), 2)
- self.assertRecordsMatch({
- 'primary_key': 'bugf:bug_id',
- 'record_type': 'bugf',
- 'launchpad_id': 'owner',
- 'date': 1234567890,
- }, bugs[0])
- self.assertRecordsMatch({
- 'primary_key': 'bugr:bug_id',
- 'record_type': 'bugr',
- 'launchpad_id': '*unassigned',
- 'date': 1234567891,
- }, bugs[1])
-
- # process records complex scenarios
-
- def test_process_blueprint_one_draft_spawned_lp_doesnt_know_user(self):
- # In: blueprint record
- # LP doesn't know user
- # Out: blueprint-draft record
- # new user profile created
- record_processor_inst = self.make_record_processor()
-
- processed_records = list(record_processor_inst.process([
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890}
- ]))
-
- self.assertRecordsMatch(
- {'record_type': 'bpd',
- 'launchpad_id': 'john_doe',
- 'author_name': 'john_doe',
- 'company_name': '*independent'},
- processed_records[0])
-
- user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst, user_id='john_doe')
- self.assertEqual({
- 'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'john_doe',
- 'emails': [],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]
- }, user)
-
- def test_process_blueprint_one_draft_spawned_lp_knows_user(self):
- # In: blueprint record
- # LP knows user
- # Out: blueprint-draft record
- # new user profile created, name is taken from LP profile
- record_processor_inst = self.make_record_processor(
- lp_user_name={
- 'john_doe': {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_records = list(record_processor_inst.process([
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890}
- ]))
-
- self.assertRecordsMatch(
- {'record_type': 'bpd',
- 'launchpad_id': 'john_doe',
- 'author_name': 'John Doe',
- 'company_name': '*independent'},
- processed_records[0])
-
- user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst, user_id='john_doe')
- self.assertEqual({
- 'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': [],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]
- }, user)
-
- def test_process_blueprint_then_review(self):
- record_processor_inst = self.make_record_processor(
- lp_user_name={
- 'john_doe': {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_records = list(record_processor_inst.process([
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890},
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@gmail.com',
- 'username': 'john_doe'},
- 'createdOn': 1379404951,
- 'module': 'nova', 'branch': 'master'}
- ]))
-
- self.assertRecordsMatch(
- {'record_type': 'bpd',
- 'user_id': 'john_doe',
- 'author_name': 'John Doe',
- 'company_name': '*independent'},
- processed_records[0])
-
- self.assertRecordsMatch(
- {'record_type': 'review',
- 'user_id': 'john_doe',
- 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'company_name': '*independent'},
- processed_records[1])
-
- user = {'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'gerrit_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@gmail.com'],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]}
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe'))
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@gmail.com'))
-
- def test_process_blueprint_then_commit(self):
- record_processor_inst = self.make_record_processor(
- lp_user_name={
- 'john_doe': {'name': 'john_doe', 'display_name': 'John Doe'}},
- lp_info={'john_doe@gmail.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_records = list(record_processor_inst.process([
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890},
- {'record_type': 'commit',
- 'commit_id': 'de7e8f297c193fb310f22815334a54b9c76a0be1',
- 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'date': 1234567890,
- 'lines_added': 25,
- 'lines_deleted': 9,
- 'release_name': 'havana'}
- ]))
-
- self.assertRecordsMatch(
- {'record_type': 'bpd',
- 'launchpad_id': 'john_doe',
- 'author_name': 'John Doe',
- 'company_name': '*independent'},
- processed_records[0])
-
- self.assertRecordsMatch(
- {'record_type': 'commit',
- 'user_id': 'john_doe',
- 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'company_name': '*independent'},
- processed_records[1])
-
- user = {'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@gmail.com'],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]}
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe'))
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@gmail.com'))
-
- def test_process_review_then_blueprint(self):
- record_processor_inst = self.make_record_processor(
- lp_user_name={
- 'john_doe': {'name': 'john_doe', 'display_name': 'John Doe'}})
-
- processed_records = list(record_processor_inst.process([
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@gmail.com',
- 'username': 'john_doe'},
- 'createdOn': 1379404951,
- 'module': 'nova', 'branch': 'master'},
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890}
- ]))
-
- self.assertRecordsMatch(
- {'record_type': 'review',
- 'user_id': 'john_doe',
- 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'company_name': '*independent'},
- processed_records[0])
-
- self.assertRecordsMatch(
- {'record_type': 'bpd',
- 'user_id': 'john_doe',
- 'author_name': 'John Doe',
- 'company_name': '*independent'},
- processed_records[1])
-
- user = {'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'gerrit_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@gmail.com'],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]}
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe'))
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@gmail.com'))
-
- def test_create_member(self):
- member_record = {'member_id': '123456789',
- 'member_name': 'John Doe',
- 'member_uri': 'http://www.openstack.org/community'
- '/members/profile/123456789',
- 'date_joined': 'August 01, 2012 ',
- 'company_draft': 'Mirantis'}
-
- record_processor_inst = self.make_record_processor()
- result_member = next(record_processor_inst._process_member(
- member_record))
-
- self.assertEqual(result_member['primary_key'], 'member:123456789')
- self.assertEqual(result_member['date'], utils.member_date_to_timestamp(
- 'August 01, 2012 '))
- self.assertEqual(result_member['author_name'], 'John Doe')
- self.assertEqual(result_member['company_name'], 'Mirantis')
-
- result_user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- member_id='123456789')
-
- self.assertEqual(result_user['user_name'], 'John Doe')
- self.assertEqual(result_user['company_name'], 'Mirantis')
- self.assertEqual(result_user['companies'],
- [{'company_name': 'Mirantis', 'end_date': 0}])
-
- def test_update_member(self):
- member_record = {'member_id': '123456789',
- 'member_name': 'John Doe',
- 'member_uri': 'http://www.openstack.org/community'
- '/members/profile/123456789',
- 'date_joined': 'August 01, 2012 ',
- 'company_draft': 'Mirantis'}
-
- record_processor_inst = self.make_record_processor()
-
- updated_member_record = member_record
- updated_member_record['member_name'] = 'Bill Smith'
- updated_member_record['company_draft'] = 'Rackspace'
-
- result_member = next(record_processor_inst._process_member(
- updated_member_record))
- self.assertEqual(result_member['author_name'], 'Bill Smith')
- self.assertEqual(result_member['company_name'], 'Rackspace')
-
- result_user = user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- member_id='123456789')
-
- self.assertEqual(result_user['user_name'], 'Bill Smith')
- self.assertEqual(result_user['companies'],
- [{'company_name': 'Rackspace', 'end_date': 0}])
-
- def test_process_email_then_review(self):
- # it is expected that the user profile will contain email and
- # gerrit id, while LP id will be None
- record_processor_inst = self.make_record_processor()
-
- list(record_processor_inst.process([
- {'record_type': 'email',
- 'message_id': '',
- 'author_email': 'john_doe@gmail.com',
- 'subject': 'hello, world!',
- 'body': 'lorem ipsum',
- 'date': 1234567890},
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@gmail.com',
- 'username': 'john_doe'},
- 'createdOn': 1379404951,
- 'module': 'nova', 'branch': 'master'}
- ]))
-
- user = {'seq': 1,
- 'user_id': 'john_doe@gmail.com',
- 'gerrit_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@gmail.com'],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]}
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@gmail.com'))
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- gerrit_id='john_doe'))
-
- def test_process_email_then_review_gerrit_id_same_as_launchpad_id(self):
- # it is expected that the user profile will contain email, LP id and
- # gerrit id
- record_processor_inst = self.make_record_processor(
- lp_user_name={'john_doe': {'name': 'john_doe',
- 'display_name': 'John Doe'}}
- )
-
- list(record_processor_inst.process([
- {'record_type': 'email',
- 'message_id': '',
- 'author_email': 'john_doe@gmail.com',
- 'subject': 'hello, world!',
- 'body': 'lorem ipsum',
- 'date': 1234567890},
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@gmail.com',
- 'username': 'john_doe'},
- 'createdOn': 1379404951,
- 'module': 'nova', 'branch': 'master'}
- ]))
-
- user = {'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'gerrit_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@gmail.com'],
- 'companies': [{'company_name': '*independent', 'end_date': 0}]}
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@gmail.com'))
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe'))
- self.assertEqual(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- gerrit_id='john_doe'))
-
- def test_process_commit_then_review_with_different_email(self):
- record_processor_inst = self.make_record_processor(
- lp_info={'john_doe@gmail.com':
- {'name': 'john_doe', 'display_name': 'John Doe'}},
- lp_user_name={'john_doe': {'name': 'john_doe',
- 'display_name': 'John Doe'}},
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}])
-
- list(record_processor_inst.process([
- {'record_type': 'commit',
- 'commit_id': 'de7e8f297c193fb310f22815334a54b9c76a0be1',
- 'author_name': 'John Doe', 'author_email': 'john_doe@gmail.com',
- 'date': 1234567890, 'lines_added': 25, 'lines_deleted': 9,
- 'release_name': 'havana'},
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'Bill Smith', 'email': 'bill@smith.to',
- 'username': 'bsmith'},
- 'createdOn': 1379404951, 'module': 'nova', 'branch': 'master',
- 'patchSets': [
- {'number': '1',
- 'revision': '4d8984e92910c37b7d101c1ae8c8283a2e6f4a76',
- 'ref': 'refs/changes/16/58516/1',
- 'uploader': {'name': 'Bill Smith', 'email': 'bill@smith.to',
- 'username': 'bsmith'},
- 'createdOn': 1385470730,
- 'approvals': [
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '1', 'grantedOn': 1385478464,
- 'by': {'name': 'John Doe', 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'}}]}]}
- ]))
- user = {'seq': 1,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@ibm.com', 'john_doe@gmail.com'],
- 'companies': [{'company_name': 'IBM', 'end_date': 0}]}
- self.assertUsersMatch(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- user_id='john_doe'))
- self.assertUsersMatch(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@gmail.com'))
- self.assertUsersMatch(user, user_processor.load_user(
- record_processor_inst.runtime_storage_inst,
- email='john_doe@ibm.com'))
-
- def test_merge_users(self):
- record_processor_inst = self.make_record_processor(
- lp_user_name={
- 'john_doe': {'name': 'john_doe', 'display_name': 'John Doe'}
- },
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- )
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
-
- runtime_storage_inst.set_records(record_processor_inst.process([
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890},
- {'record_type': 'email',
- 'message_id': '',
- 'author_email': 'john_doe@ibm.com', 'author_name': 'John Doe',
- 'subject': 'hello, world!',
- 'body': 'lorem ipsum',
- 'date': 1234567890},
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'},
- 'createdOn': 1379404951,
- 'module': 'nova', 'branch': 'master'}
- ]))
-
- record_processor_inst.post_processing({})
-
- user = {'seq': 2,
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'gerrit_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@ibm.com'],
- 'companies': [{'company_name': 'IBM', 'end_date': 0}]}
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
- self.assertEqual(2, runtime_storage_inst.get_by_key('user:count'))
- self.assertIsNone(user_processor.load_user(
- runtime_storage_inst, 1))
- self.assertEqual(user, user_processor.load_user(
- runtime_storage_inst, 2))
- self.assertEqual(user, user_processor.load_user(
- runtime_storage_inst, user_id='john_doe'))
- self.assertEqual(user, user_processor.load_user(
- runtime_storage_inst, email='john_doe@ibm.com'))
- self.assertEqual(user, user_processor.load_user(
- runtime_storage_inst, gerrit_id='john_doe'))
-
- # all records should have the same user_id and company name
- for record in runtime_storage_inst.get_all_records():
- self.assertEqual('john_doe', record['user_id'],
- message='Record %s' % record['primary_key'])
- self.assertEqual('IBM', record['company_name'],
- message='Record %s' % record['primary_key'])
-
- def test_core_user_guess(self):
- record_processor_inst = self.make_record_processor(
- lp_user_name={
- 'john_doe': {'name': 'john_doe', 'display_name': 'John Doe'},
- 'homer': {'name': 'homer', 'display_name': 'Homer Simpson'},
- },
- companies=[{'company_name': 'IBM', 'domains': ['ibm.com']}],
- )
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
-
- timestamp = int(time.time())
- runtime_storage_inst.set_records(record_processor_inst.process([
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'},
- 'createdOn': timestamp,
- 'module': 'nova',
- 'branch': 'master',
- 'patchSets': [
- {'number': '1',
- 'revision': '4d8984e92910c37b7d101c1ae8c8283a2e6f4a76',
- 'ref': 'refs/changes/16/58516/1',
- 'uploader': {
- 'name': 'Bill Smith',
- 'email': 'bill@smith.to',
- 'username': 'bsmith'},
- 'createdOn': timestamp,
- 'approvals': [
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '2', 'grantedOn': timestamp,
- 'by': {
- 'name': 'John Doe',
- 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'}},
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '-1', 'grantedOn': timestamp - 1, # differs from above
- 'by': {
- 'name': 'Homer Simpson',
- 'email': 'hsimpson@gmail.com',
- 'username': 'homer'}}
- ]
- }]}
- ]))
-
- record_processor_inst.post_processing({})
-
- user_1 = {'seq': 1, 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe', 'user_name': 'John Doe',
- 'emails': ['john_doe@ibm.com'],
- 'core': [('nova', 'master')],
- 'companies': [{'company_name': 'IBM', 'end_date': 0}]}
- user_2 = {'seq': 3, 'user_id': 'homer',
- 'launchpad_id': 'homer', 'user_name': 'Homer Simpson',
- 'emails': ['hsimpson@gmail.com'],
- 'companies': [{'company_name': '*independent',
- 'end_date': 0}]}
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
- self.assertUsersMatch(user_1, user_processor.load_user(
- runtime_storage_inst, user_id='john_doe'))
- self.assertUsersMatch(user_2, user_processor.load_user(
- runtime_storage_inst, user_id='homer'))
-
- def test_process_commit_with_coauthors(self):
- record_processor_inst = self.make_record_processor(
- lp_info={'jimi.hendrix@openstack.com':
- {'name': 'jimi', 'display_name': 'Jimi Hendrix'},
- 'tupac.shakur@openstack.com':
- {'name': 'tupac', 'display_name': 'Tupac Shakur'},
- 'bob.dylan@openstack.com':
- {'name': 'bob', 'display_name': 'Bob Dylan'}})
- processed_commits = list(record_processor_inst.process([
- {'record_type': 'commit',
- 'commit_id': 'de7e8f297c193fb310f22815334a54b9c76a0be1',
- 'author_name': 'Jimi Hendrix',
- 'author_email': 'jimi.hendrix@openstack.com', 'date': 1234567890,
- 'lines_added': 25, 'lines_deleted': 9, 'release_name': 'havana',
- 'coauthor': [{'author_name': 'Tupac Shakur',
- 'author_email': 'tupac.shakur@openstack.com'},
- {'author_name': 'Bob Dylan',
- 'author_email': 'bob.dylan@openstack.com'}]}]))
-
- self.assertEqual(3, len(processed_commits))
-
- self.assertRecordsMatch({
- 'user_id': 'tupac',
- 'author_email': 'tupac.shakur@openstack.com',
- 'author_name': 'Tupac Shakur',
- }, processed_commits[0])
- self.assertRecordsMatch({
- 'user_id': 'jimi',
- 'author_email': 'jimi.hendrix@openstack.com',
- 'author_name': 'Jimi Hendrix',
- }, processed_commits[2])
- self.assertEqual('tupac',
- processed_commits[0]['coauthor'][0]['user_id'])
- self.assertEqual('bob',
- processed_commits[0]['coauthor'][1]['user_id'])
- self.assertEqual('jimi',
- processed_commits[0]['coauthor'][2]['user_id'])
-
- def test_process_commit_with_coauthors_no_dup_of_author(self):
- record_processor_inst = self.make_record_processor(
- lp_info={'jimi.hendrix@openstack.com':
- {'name': 'jimi', 'display_name': 'Jimi Hendrix'},
- 'bob.dylan@openstack.com':
- {'name': 'bob', 'display_name': 'Bob Dylan'}})
- processed_commits = list(record_processor_inst.process([
- {'record_type': 'commit',
- 'commit_id': 'de7e8f297c193fb310f22815334a54b9c76a0be1',
- 'author_name': 'Jimi Hendrix',
- 'author_email': 'jimi.hendrix@openstack.com', 'date': 1234567890,
- 'lines_added': 25, 'lines_deleted': 9, 'release_name': 'havana',
- 'coauthor': [{'author_name': 'Jimi Hendrix',
- 'author_email': 'jimi.hendrix@openstack.com'},
- {'author_name': 'Bob Dylan',
- 'author_email': 'bob.dylan@openstack.com'}]}]))
-
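- # the author also appears in the co-author list, so no duplicate record
- # is produced for him: two records instead of three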
- self.assertEqual(2, len(processed_commits))
-
- self.assertEqual('jimi',
- processed_commits[0]['coauthor'][0]['user_id'])
- self.assertEqual('bob',
- processed_commits[0]['coauthor'][1]['user_id'])
-
- # record post-processing
-
- def test_blueprint_mention_count(self):
- record_processor_inst = self.make_record_processor()
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
-
- runtime_storage_inst.set_records(record_processor_inst.process([
- {'record_type': 'bp',
- 'id': 'mod:blueprint',
- 'self_link': 'http://launchpad.net/blueprint',
- 'owner': 'john_doe',
- 'date_created': 1234567890},
- {'record_type': 'bp',
- 'id': 'mod:ignored',
- 'self_link': 'http://launchpad.net/ignored',
- 'owner': 'john_doe',
- 'date_created': 1234567890},
- {'record_type': 'email',
- 'message_id': '<message-id>',
- 'author_email': 'john_doe@gmail.com', 'author_name': 'John Doe',
- 'subject': 'hello, world!',
- 'body': 'lorem ipsum',
- 'date': 1234567890,
- 'blueprint_id': ['mod:blueprint']},
- {'record_type': 'email',
- 'message_id': '<another-message-id>',
- 'author_email': 'john_doe@gmail.com', 'author_name': 'John Doe',
- 'subject': 'hello, world!',
- 'body': 'lorem ipsum',
- 'date': 1234567895,
- 'blueprint_id': ['mod:blueprint', 'mod:invalid']},
- ]))
- record_processor_inst.post_processing({})
-
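- # 'mod:blueprint' is mentioned by both emails, 'mod:ignored' by none,
- # and the non-existent 'mod:invalid' should be filtered out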
- bp1 = runtime_storage_inst.get_by_primary_key('bpd:mod:blueprint')
- self.assertEqual(2, bp1['mention_count'])
- self.assertEqual(1234567895, bp1['mention_date'])
-
- bp2 = runtime_storage_inst.get_by_primary_key('bpd:mod:ignored')
- self.assertEqual(0, bp2['mention_count'])
- self.assertEqual(0, bp2['mention_date'])
-
- email = runtime_storage_inst.get_by_primary_key('<another-message-id>')
- self.assertIn('mod:blueprint', email['blueprint_id'])
- self.assertNotIn('mod:invalid', email['blueprint_id'])
-
- def test_mark_disagreement(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['john_doe@ibm.com'],
- 'core': [('nova', 'master')],
- 'companies': [{'company_name': 'IBM', 'end_date': 0}]}
- ],
- )
- timestamp = int(time.time())
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
- runtime_storage_inst.set_records(record_processor_inst.process([
- {'record_type': 'review',
- 'id': 'I1045730e47e9e6ad31fcdfbaefdad77e2f3b2c3e',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'},
- 'createdOn': timestamp,
- 'module': 'nova',
- 'branch': 'master',
- 'status': 'NEW',
- 'patchSets': [
- {'number': '1',
- 'revision': '4d8984e92910c37b7d101c1ae8c8283a2e6f4a76',
- 'ref': 'refs/changes/16/58516/1',
- 'uploader': {
- 'name': 'Bill Smith',
- 'email': 'bill@smith.to',
- 'username': 'bsmith'},
- 'createdOn': timestamp,
- 'approvals': [
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '2', 'grantedOn': timestamp - 1,
- 'by': {
- 'name': 'Homer Simpson',
- 'email': 'hsimpson@gmail.com',
- 'username': 'homer'}},
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '-2', 'grantedOn': timestamp,
- 'by': {
- 'name': 'John Doe',
- 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'}}
- ]
- },
- {'number': '2',
- 'revision': '4d8984e92910c37b7d101c1ae8c8283a2e6f4a76',
- 'ref': 'refs/changes/16/58516/1',
- 'uploader': {
- 'name': 'Bill Smith',
- 'email': 'bill@smith.to',
- 'username': 'bsmith'},
- 'createdOn': timestamp + 1,
- 'approvals': [
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '1', 'grantedOn': timestamp + 2,
- 'by': {
- 'name': 'Homer Simpson',
- 'email': 'hsimpson@gmail.com',
- 'username': 'homer'}},
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '-1', 'grantedOn': timestamp + 3,
- 'by': {
- 'name': 'Bart Simpson',
- 'email': 'bsimpson@gmail.com',
- 'username': 'bart'}},
- {'type': 'Code-Review', 'description': 'Code Review',
- 'value': '2', 'grantedOn': timestamp + 4,
- 'by': {
- 'name': 'John Doe',
- 'email': 'john_doe@ibm.com',
- 'username': 'john_doe'}}
- ]
- }
- ]}
- ]))
- record_processor_inst.post_processing({})
-
- marks = [r for r in runtime_storage_inst.get_all_records()
- if r['record_type'] == 'mark']
-
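- # post-processing flags a mark as a disagreement when a core reviewer
- # later casts a vote of the opposite sign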
- homer_mark = next(six.moves.filter(
- lambda x: x['date'] == (timestamp - 1), marks), None)
- self.assertTrue(homer_mark.get('disagreement'),
- msg='Disagreement: core set -2 after +2')
-
- homer_mark = next(six.moves.filter(
- lambda x: x['date'] == (timestamp + 2), marks), None)
- self.assertFalse(homer_mark.get('disagreement'),
- msg='No disagreement: core set +2 after +1')
-
- bart_mark = next(six.moves.filter(
- lambda x: x['date'] == (timestamp + 3), marks), None)
- self.assertTrue(bart_mark.get('disagreement'),
- msg='Disagreement: core set +2 after -1')
-
- def test_commit_merge_date(self):
- record_processor_inst = self.make_record_processor()
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
-
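- # a commit linked to a merged review via change_id should take the
- # review's merge date (lastUpdated) as its own date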
- runtime_storage_inst.set_records(record_processor_inst.process([
- {'record_type': 'commit',
- 'commit_id': 'de7e8f2',
- 'change_id': ['I104573'],
- 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'date': 1234567890,
- 'lines_added': 25,
- 'lines_deleted': 9,
- 'module': u'stackalytics',
- 'release_name': 'havana'},
- {'record_type': 'review',
- 'id': 'I104573',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@gmail.com',
- 'username': 'john_doe'},
- 'createdOn': 1385478465,
- 'lastUpdated': 1385490000,
- 'status': 'MERGED',
- 'module': 'nova', 'branch': 'master'},
- ]))
- record_processor_inst.post_processing({})
-
- commit = runtime_storage_inst.get_by_primary_key('de7e8f2')
- self.assertEqual(1385490000, commit['date'])
-
- def test_commit_module_alias(self):
- record_processor_inst = self.make_record_processor()
- runtime_storage_inst = record_processor_inst.runtime_storage_inst
-
- with mock.patch('stackalytics.processor.utils.load_repos') as patch:
- patch.return_value = [{'module': 'sahara', 'aliases': ['savanna']}]
- runtime_storage_inst.set_records(record_processor_inst.process([
- {'record_type': 'commit',
- 'commit_id': 'de7e8f2',
- 'change_id': ['I104573'],
- 'author_name': 'John Doe',
- 'author_email': 'john_doe@gmail.com',
- 'date': 1234567890,
- 'lines_added': 25,
- 'lines_deleted': 9,
- 'module': u'savanna',
- 'release_name': 'havana'},
- {'record_type': 'review',
- 'id': 'I104573',
- 'subject': 'Fix AttributeError in Keypair._add_details()',
- 'owner': {'name': 'John Doe',
- 'email': 'john_doe@gmail.com',
- 'username': 'john_doe'},
- 'createdOn': 1385478465,
- 'lastUpdated': 1385490000,
- 'status': 'MERGED',
- 'module': 'nova', 'branch': 'master'},
- ]))
- record_processor_inst.post_processing({})
-
- commit = runtime_storage_inst.get_by_primary_key('de7e8f2')
- self.assertEqual('sahara', commit['module'])
-
- # update records
-
- def _generate_record_commit(self):
- yield {'commit_id': u'0afdc64bfd041b03943ceda7849c4443940b6053',
- 'lines_added': 9,
- 'module': u'stackalytics',
- 'record_type': 'commit',
- 'message': u'Closes bug 1212953\n\nChange-Id: '
- u'I33f0f37b6460dc494abf2520dc109c9893ace9e6\n',
- 'subject': u'Fixed affiliation of Edgar and Sumit',
- 'loc': 10,
- 'user_id': u'john_doe',
- 'primary_key': u'0afdc64bfd041b03943ceda7849c4443940b6053',
- 'author_email': u'jdoe@super.no',
- 'company_name': u'SuperCompany',
- 'record_id': 6,
- 'lines_deleted': 1,
- 'week': 2275,
- 'blueprint_id': None,
- 'bug_id': u'1212953',
- 'files_changed': 1,
- 'author_name': u'John Doe',
- 'date': 1376737923,
- 'launchpad_id': u'john_doe',
- 'branches': set([u'master']),
- 'change_id': u'I33f0f37b6460dc494abf2520dc109c9893ace9e6',
- 'release': u'havana'}
-
- # mail processing
-
- def test_process_mail(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': 'NEC', 'end_date': 0},
- ]
- }
- ],
- repos=[{"module": "stackalytics"}]
- )
-
- processed_commit = list(record_processor_inst.process(
- generate_emails(
- author_email='johndoe@gmail.com',
- author_name='John Doe',
- subject='[openstack-dev] [Stackalytics] Configuration files')
- ))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- 'module': 'stackalytics',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_mail_guessed(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': 'NEC', 'end_date': 0},
- ]
- }
- ],
- repos=[{'module': 'nova'}, {'module': 'neutron'}]
- )
-
- processed_commit = list(record_processor_inst.process(
- generate_emails(
- author_email='johndoe@gmail.com',
- author_name='John Doe',
- subject='[openstack-dev] [Neutron] [Nova] Integration issue')
- ))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- 'module': 'neutron',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_mail_guessed_module_in_body_override(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': 'NEC', 'end_date': 0},
- ]
- }
- ],
- repos=[{'module': 'nova'}, {'module': 'neutron'}]
- )
-
- processed_commit = list(record_processor_inst.process(
- generate_emails(
- author_email='johndoe@gmail.com',
- author_name='John Doe',
- module='nova',
- subject='[openstack-dev] [neutron] Comments/questions on the')
- ))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- 'module': 'neutron',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_mail_guessed_module_in_body(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': 'NEC', 'end_date': 0},
- ]
- }
- ],
- repos=[{'module': 'nova'}, {'module': 'neutron'}]
- )
-
- processed_commit = list(record_processor_inst.process(
- generate_emails(
- author_email='johndoe@gmail.com',
- author_name='John Doe',
- module='nova',
- subject='[openstack-dev] Comments/questions on the')
- ))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- 'module': 'nova',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_process_mail_unmatched(self):
- record_processor_inst = self.make_record_processor(
- users=[
- {
- 'user_id': 'john_doe',
- 'launchpad_id': 'john_doe',
- 'user_name': 'John Doe',
- 'emails': ['johndoe@gmail.com', 'johndoe@nec.co.jp'],
- 'companies': [
- {'company_name': 'NEC', 'end_date': 0},
- ]
- }
- ],
- repos=[{'module': 'nova'}, {'module': 'neutron'}]
- )
-
- processed_commit = list(record_processor_inst.process(
- generate_emails(
- author_email='johndoe@gmail.com',
- author_name='John Doe',
- subject='[openstack-dev] Comments/questions on the')
- ))[0]
-
- expected_commit = {
- 'user_id': 'john_doe',
- 'author_email': 'johndoe@gmail.com',
- 'author_name': 'John Doe',
- 'company_name': 'NEC',
- 'module': 'unknown',
- }
-
- self.assertRecordsMatch(expected_commit, processed_commit)
-
- def test_get_modules(self):
- record_processor_inst = self.make_record_processor()
- with mock.patch('stackalytics.processor.utils.load_repos') as patch:
- patch.return_value = [{'module': 'nova'},
- {'module': 'python-novaclient'},
- {'module': 'neutron'},
- {'module': 'sahara', 'aliases': ['savanna']}]
- modules, module_alias_map = record_processor_inst._get_modules()
- self.assertEqual(set(['nova', 'neutron', 'sahara', 'savanna']),
- set(modules))
- self.assertEqual({'savanna': 'sahara'}, module_alias_map)
-
- def test_guess_module(self):
- record_processor_inst = self.make_record_processor()
- with mock.patch('stackalytics.processor.utils.load_repos') as patch:
- patch.return_value = [{'module': 'sahara', 'aliases': ['savanna']}]
- record = {'subject': '[savanna] T'}
- record_processor_inst._guess_module(record)
- self.assertEqual({'subject': '[savanna] T', 'module': 'sahara'},
- record)
-
- def assertRecordsMatch(self, expected, actual):
- for key, value in six.iteritems(expected):
- self.assertEqual(value, actual.get(key),
- 'Values for key %s do not match' % key)
-
- def assertUsersMatch(self, expected, actual):
- self.assertIsNotNone(actual, 'User should not be None')
- match = True
- for key, value in six.iteritems(expected):
- # accumulate the result so that a mismatch on any key fails the check
- if key == 'emails':
- match = match and (set(value) == set(actual.get(key)))
- else:
- match = match and (value == actual.get(key))
-
- self.assertTrue(match, 'User %s should match %s' % (actual, expected))
-
- # Helpers
-
- def make_record_processor(self, users=None, companies=None, releases=None,
- repos=None, lp_info=None, lp_user_name=None):
- rp = record_processor.RecordProcessor(make_runtime_storage(
- users=users, companies=companies, releases=releases, repos=repos))
-
- if lp_info is not None:
- self.lp_profile_by_email.side_effect = (
- lambda x: lp_info.get(x))
-
- if lp_user_name is not None:
- self.lp_profile_by_launchpad_id.side_effect = (
- lambda x: lp_user_name.get(x))
-
- return rp
-
-
-def generate_commits(author_name='John Doe', author_email='johndoe@gmail.com',
- date=1999999999):
- yield {
- 'record_type': 'commit',
- 'commit_id': 'de7e8f297c193fb310f22815334a54b9c76a0be1',
- 'author_name': author_name,
- 'author_email': author_email,
- 'date': date,
- 'lines_added': 25,
- 'lines_deleted': 9,
- 'release_name': 'havana',
- }
-
-
-def generate_emails(author_name='John Doe', author_email='johndoe@gmail.com',
- date=1999999999, subject='[openstack-dev]', module=None):
- yield {
- 'record_type': 'email',
- 'message_id': 'de7e8f297c193fb310f22815334a54b9c76a0be1',
- 'author_name': author_name,
- 'author_email': author_email,
- 'date': date,
- 'subject': subject,
- 'module': module,
- 'body': 'lorem ipsum',
- }
-
-
-def make_runtime_storage(users=None, companies=None, releases=None,
- repos=None):
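- # in-memory stand-in for RuntimeStorage: records live in a plain dict,
- # with primary keys tracked in insertion order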
- runtime_storage_cache = {}
- runtime_storage_record_keys = []
-
- def get_by_key(key):
- if key == 'companies':
- return _make_companies(companies or [
- {"company_name": "*independent", "domains": [""]},
- ])
- elif key == 'users':
- return _make_users(users or [])
- elif key == 'releases':
- return releases or RELEASES
- elif key == 'repos':
- return repos or REPOS
- else:
- return runtime_storage_cache.get(key)
-
- def set_by_key(key, value):
- runtime_storage_cache[key] = value
-
- def delete_by_key(key):
- del runtime_storage_cache[key]
-
- def inc_user_count():
- count = runtime_storage_cache.get('user:count') or 0
- count += 1
- runtime_storage_cache['user:count'] = count
- return count
-
- def get_all_users():
- for n in six.moves.range(
- 0, (runtime_storage_cache.get('user:count') or 0) + 1):
- u = runtime_storage_cache.get('user:%s' % n)
- if u:
- yield u
-
- def set_records(records_iterator):
- for record in records_iterator:
- runtime_storage_cache[record['primary_key']] = record
- runtime_storage_record_keys.append(record['primary_key'])
-
- def get_all_records():
- return [runtime_storage_cache[key]
- for key in runtime_storage_record_keys]
-
- def get_by_primary_key(primary_key):
- return runtime_storage_cache.get(primary_key)
-
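- # wire the closures into a mock that mimics the RuntimeStorage API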
- rs = mock.Mock(runtime_storage.RuntimeStorage)
- rs.get_by_key = mock.Mock(side_effect=get_by_key)
- rs.set_by_key = mock.Mock(side_effect=set_by_key)
- rs.delete_by_key = mock.Mock(side_effect=delete_by_key)
- rs.inc_user_count = mock.Mock(side_effect=inc_user_count)
- rs.get_all_users = mock.Mock(side_effect=get_all_users)
- rs.set_records = mock.Mock(side_effect=set_records)
- rs.get_all_records = mock.Mock(side_effect=get_all_records)
- rs.get_by_primary_key = mock.Mock(side_effect=get_by_primary_key)
-
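- # also register each user under the lookup keys used by the processor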
- if users:
- for user in users:
- set_by_key('user:%s' % user['user_id'], user)
- if user.get('launchpad_id'):
- set_by_key('user:%s' % user['launchpad_id'], user)
- for email in user.get('emails') or []:
- set_by_key('user:%s' % email, user)
-
- return rs
-
-
-def _make_users(users):
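- # index every profile under its user_id, launchpad_id and all emails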
- users_index = {}
- for user in users:
- if 'user_id' in user:
- users_index[user['user_id']] = user
- if 'launchpad_id' in user:
- users_index[user['launchpad_id']] = user
- for email in user['emails']:
- users_index[email] = user
- return users_index
-
-
-def _make_companies(companies):
- domains_index = {}
- for company in companies:
- for domain in company['domains']:
- domains_index[domain] = company['company_name']
- return domains_index
diff --git a/stackalytics/tests/unit/test_user_processor.py b/stackalytics/tests/unit/test_user_processor.py
deleted file mode 100644
index d35ebc99d..000000000
--- a/stackalytics/tests/unit/test_user_processor.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright (c) 2015 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import testtools
-
-from stackalytics.processor import user_processor
-from stackalytics.processor import utils
-
-
-class TestUserProcessor(testtools.TestCase):
-
- def test_update_user(self):
- user = {
- "launchpad_id": "user",
- "companies": [
- {
- "company_name": "Rackspace",
- "end_date": "2011-Nov-20"
- },
- {
- "company_name": "IBM",
- "end_date": None
- }
- ],
- "user_name": "John Smith",
- "emails": ["john@smith.com"]
- }
-
- stored_user = {
- "launchpad_id": "user",
- "companies": [
- {
- "company_name": "Rackspace",
- "end_date": "2011-Nov-20"
- },
- {
- "company_name": "IBM",
- "end_date": None
- }
- ],
- "user_name": "Johnny",
- "emails": ["john@smith.com", "mapped_email@gmail.com"],
- "static": True
- }
-
- updated_user = user_processor.update_user_profile(stored_user, user)
-
- # merge emails from profile with those discovered by Stackalytics
- self.assertEqual(set(stored_user['emails']),
- set(updated_user['emails']))
- # the name from the profile takes priority over the mined one
- self.assertEqual(user['user_name'], updated_user['user_name'])
- # the static flag must be preserved
- self.assertTrue(updated_user.get('static'))
-
- def test_update_user_unknown_user(self):
- user = {
- "launchpad_id": "user",
- "companies": [
- {
- "company_name": "Rackspace",
- "end_date": "2011-Nov-20"
- },
- {
- "company_name": "IBM",
- "end_date": None
- }
- ],
- "user_name": "John Smith",
- "emails": ["john@smith.com"]
- }
-
- stored_user = None
-
- updated_user = user_processor.update_user_profile(stored_user, user)
- self.assertTrue(updated_user.get('static'))
-
- def test_are_users_same(self):
- users = [
- dict(seq=1),
- dict(seq=1),
- dict(seq=1),
- ]
- self.assertTrue(user_processor.are_users_same(users))
-
- def test_are_users_same_none(self):
- users = [
- {},
- {},
- ]
- self.assertFalse(user_processor.are_users_same(users))
-
- def test_are_users_not_same(self):
- users = [
- dict(seq=1),
- dict(seq=2),
- dict(seq=1),
- ]
- self.assertFalse(user_processor.are_users_same(users))
-
- def test_are_users_not_same_2(self):
- users = [
- dict(seq=1),
- dict(seq=1),
- {}
- ]
- self.assertFalse(user_processor.are_users_same(users))
-
- def test_resolve_companies_aliases(self):
- domains_index = {
- utils.normalize_company_name('IBM India'): 'IBM',
- utils.normalize_company_name('IBM Japan'): 'IBM',
- }
- user = [
- dict(company_name='IBM India', end_date=1234567890),
- dict(company_name='IBM Japan', end_date=2234567890),
- dict(company_name='Intel', end_date=0),
- ]
-
- observed = user_processor.resolve_companies_aliases(
- domains_index, user)
-
- expected = [
- dict(company_name='IBM', end_date=2234567890),
- dict(company_name='Intel', end_date=0),
- ]
- self.assertEqual(expected, observed)
diff --git a/stackalytics/tests/unit/test_utils.py b/stackalytics/tests/unit/test_utils.py
deleted file mode 100644
index 77e9b1333..000000000
--- a/stackalytics/tests/unit/test_utils.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.processor import utils
-
-
-class TestUtils(testtools.TestCase):
-
- def _test_one_range(self, start, end, step):
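- # the chunks produced by make_range must cover [start, end) exactly
- # once, with no duplicates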
- elements = set()
- for chunk in utils.make_range(start, end, step):
- for item in chunk:
- self.assertNotIn(item, elements)
- elements.add(item)
-
- self.assertSetEqual(set(range(start, end)), elements)
-
- def test_make_range_0_10_1(self):
- self._test_one_range(0, 10, 1)
-
- def test_make_range_0_10_3(self):
- self._test_one_range(0, 10, 3)
-
- def test_make_range_3_5_4(self):
- self._test_one_range(3, 5, 4)
-
- def test_make_range_5_26_10(self):
- self._test_one_range(5, 26, 10)
-
- def test_email_valid(self):
- self.assertTrue(utils.check_email_validity('pupkin@gmail.com'))
- self.assertTrue(utils.check_email_validity('v.pup_kin2@ntt.co.jp'))
-
- def test_email_invalid(self):
- self.assertFalse(utils.check_email_validity('pupkin@localhost'))
- self.assertFalse(utils.check_email_validity('222@some.(trash)'))
-
- def test_unwrap(self):
- original = 'Lorem ipsum. Dolor\nsit amet.\n Lorem\n ipsum.\ndolor!\n'
- expected = 'Lorem ipsum. Dolor sit amet.\n Lorem\n ipsum.\ndolor!'
-
- self.assertEqual(expected, utils.unwrap_text(original))
-
- def test_format_text_split_long_link(self):
- original = ('https://blueprints.launchpad.net/stackalytics/+spec/'
- 'stackalytics-core')
- expected = ('https://blueprints.launchpad.net'
- '/stackalytics/+spec/stackalytics-core')
-
- self.assertEqual(expected, utils.format_text(original))
-
- def test_format_text_split_full_class_path(self):
- original = 'tests.unit.benchmark.scenarios.test_base'
- expected = ('tests.unit.benchmark.'
- 'scenarios.test_base')
-
- self.assertEqual(expected, utils.format_text(original))
-
- def test_format_text_split_full_class_path_middle_line(self):
- original = 'some text tests.unit.benchmark.scenarios.test_base wide'
- expected = ('some text tests.unit.benchmark.'
- 'scenarios.test_base wide')
-
- self.assertEqual(expected, utils.format_text(original))
-
- def test_add_index(self):
- sequence = [{'name': 'A'}, {'name': 'B'}, {'name': 'C'}]
- expected = [{'index': 1, 'name': 'A'}, {'index': 2, 'name': 'B'},
- {'index': 3, 'name': 'C'}]
- self.assertEqual(expected, utils.add_index(sequence))
-
- def test_add_index_with_filter(self):
- sequence = [{'name': 'A'}, {'name': 'B'}, {'name': 'C'}]
- expected = [{'index': 0, 'name': 'A'}, {'index': '', 'name': 'B'},
- {'index': 1, 'name': 'C'}]
- self.assertEqual(expected, utils.add_index(
- sequence, start=0, item_filter=lambda x: x['name'] != 'B'))
-
- def test_keep_safe_chars(self):
- self.assertEqual('somemoretext',
- utils.keep_safe_chars('some more text'))
- self.assertEqual(u'(unicode)',
- utils.keep_safe_chars(u'(unicode \u0423) '))
-
- def test_normalize_company_name(self):
- company_names = ['EMC Corporation', 'Abc, corp..', 'Mirantis IT.',
- 'Red Hat, Inc.', 'abc s.r.o. ABC', '2s.r.o. co',
- 'AL.P.B L.P. s.r.o. s.r.o. C ltd.']
- correct_normalized_company_names = ['emc', 'abc', 'mirantis',
- 'redhat', 'abcabc', '2sro',
- 'alpbc']
- normalized_company_names = [utils.normalize_company_name(name)
- for name in company_names]
-
- self.assertEqual(normalized_company_names,
- correct_normalized_company_names)
-
- def test_validate_lp_display_name(self):
- profile = dict(name='johnny', display_name='John Smith')
- utils.validate_lp_display_name(profile)
- self.assertEqual('John Smith', profile['display_name'])
-
- profile = dict(name='johnny', display_name='')
- utils.validate_lp_display_name(profile)
- self.assertEqual('johnny', profile['display_name'])
-
- profile = None
- utils.validate_lp_display_name(profile)
- self.assertIsNone(profile)
-
- def test_pipeline_processor(self):
- counter = dict(n=0)
- consumed = []
- log = mock.Mock()
-
- def get_all_items():
- for i in range(5):
- counter['n'] += 1
- yield i
-
- def single_pass_uno():
- log('single_pass_uno:begin')
-
- def pass_1(s):
- yield s
-
- yield pass_1
-
- log('single_pass_uno:end')
-
- def single_pass_duo():
- log('single_pass_duo:begin')
-
- def pass_1(s):
- yield s + 10
-
- yield pass_1
-
- log('single_pass_duo:end')
-
- def double_pass():
- log('double_pass:begin')
- r = set()
-
- def pass_1(s):
- if s % 2:
- r.add(s)
-
- yield pass_1
-
- log('double_pass:middle')
-
- def pass_2(s):
- if s in r:
- yield s * 100
-
- yield pass_2
-
- log('double_pass:end')
-
- def consume(r):
- for x in r:
- consumed.append(x)
-
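- # double_pass yields two handlers, so the item source is consumed twice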
- processors = [single_pass_uno, double_pass, single_pass_duo]
- pipeline_processor = utils.make_pipeline_processor(processors)
- consume(pipeline_processor(get_all_items))
-
- self.assertEqual(10, counter['n']) # twice by 5 elements
-
- expected = [0, 10, 1, 11, 2, 12, 3, 13, 4, 14, 100, 300]
- self.assertEqual(expected, consumed)
-
- log.assert_has_calls([
- mock.call('single_pass_uno:begin'),
- mock.call('double_pass:begin'),
- mock.call('single_pass_duo:begin'),
- mock.call('single_pass_uno:end'),
- mock.call('double_pass:middle'),
- mock.call('single_pass_duo:end'),
- mock.call('double_pass:end'),
- ])
diff --git a/stackalytics/tests/unit/test_vcs.py b/stackalytics/tests/unit/test_vcs.py
deleted file mode 100644
index 57a4c180d..000000000
--- a/stackalytics/tests/unit/test_vcs.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.processor import vcs
-
-
-class TestVcsProcessor(testtools.TestCase):
- def setUp(self):
- super(TestVcsProcessor, self).setUp()
-
- self.repo = {
- 'module': 'dummy',
- 'uri': 'git://git.openstack.org/dummy.git',
- 'releases': []
- }
- self.git = vcs.Git(self.repo, '/tmp')
- self.chdir_patcher = mock.patch('os.chdir')
- self.chdir_patcher.start()
-
- def tearDown(self):
- super(TestVcsProcessor, self).tearDown()
- self.chdir_patcher.stop()
-
- def test_git_log(self):
- with mock.patch('sh.git') as git_mock:
- git_mock.return_value = '''
-commit_id:b5a416ac344160512f95751ae16e6612aefd4a57
-date:1369119386
-author_name:Akihiro MOTOKI
-author_email:motoki@da.jp.nec.com
-subject:Remove class-based import in the code repo
-message:Fixes bug 1167901.
-
-This commit also removes backslashes for line break.
-
-Change-Id: Id26fdfd2af4862652d7270aec132d40662efeb96
-
-diff_stat:
-
- 21 files changed, 340 insertions(+), 408 deletions(-)
-commit_id:5be031f81f76d68c6e4cbaad2247044aca179843
-date:1370975889
-author_name:Monty Taylor
-author_email:mordred@inaugust.com
-subject:Remove explicit distribute depend.
-message:Causes issues with the recent re-merge with setuptools. Advice from
-upstream is to stop doing explicit depends.
-
-Change-Id: I70638f239794e78ba049c60d2001190910a89c90
-
-diff_stat:
-
- 1 file changed, 1 deletion(-)
-commit_id:2dcb4fa4aa1925ffbd90d1cc7556a13a1bc45d1c
-date:1369831203
-author_name:Mark McClain
-author_email:mark.mcclain@dreamhost.com
-subject:add readme for 2.2.2
-message:Fixes bug: 1234567
-Also fixes bug 987654
-Change-Id: Id32a4a72ec1d13992b306c4a38e73605758e26c7
-
-diff_stat:
-
- 1 file changed, 8 insertions(+)
-commit_id:06d321b6b7681b162cd3231b5bdd92b17eb4f401
-date:1369831203
-author_name:John Doe
-author_email:john.doe@dreamhost.com
-subject:add readme for 2.2.2
-message: implements blueprint fix-me.
-Co-Authored-By: Anonymous
-Change-Id: Id32a4a72ec1d13992b306c4a38e73605758e26c7
-
-diff_stat:
-
- 0 files changed
-commit_id:913c86a9d5b6a1b74db36266e996cb4d6073f75b
-date:1369831203
-author_name:Doug Hoffner
-author_email:mark.mcclain@dreamhost.com
-subject:add readme for 2.2.2
-message:Change-Id: Id32a4a72ec1d13992b306c4a38e73605758e26c7
-Co-Authored-By: some friend of mine
-
-diff_stat:
-
- 0 files changed, 0 insertions(+), 0 deletions(-)
-commit_id:2f3103a96c4d234a4fcc0b0211a20308c0d342e7
-date:1397687866
-author_name:James E. Blair
-author_email:jeblair@openstack.org
-subject:Reduce IAD usage by 50%
-message:At provider's request.
-
-Change-Id: I976eaff357bf0ad4bce2a7fd5fe6fd81750276c5
-
-diff_stat:
-commit_id:12811c76f3a8208b36f81e61451ec17d227b4e58
-date:1369831203
-author_name:Jimi Hendrix
-author_email:jimi.hendrix@openstack.com
-subject:adds support off co-authors
-message:Change-Id: Id811c762ec1d13992b306c4a38e7360575e61451
-Co-Authored-By: Tupac Shakur <tupac.shakur@openstack.com>
-Also-By: Bob Dylan <bob.dylan@openstack.com>
-Also-By: Anonymous
-Also-By: Winnie the Pooh <winnie222@openstack.org>
-
-diff_stat:
-
- 0 files changed, 0 insertions(+), 0 deletions(-)
-commit_id:d1af9cbe0187e1a65cf1eb46fb1650cf619a7b3a
-date:1369831300
-author_name:Vasya Pupkin
-author_email:vpupkinx@openstack.com
-subject:adds new support of co-authors
-message:Change-Id: I577dfdf7f65a0c883ddbcfda62daf8c5f9c746c1
-Co-Authored-By: Tupac Shakur <tupac.shakur@openstack.com>
-Also: Bob Dylan <bob.dylan@openstack.com>
-Co-Authored: Anonymous <correct@email.com>
-Co-Author-By: Anonymous2 <correct@email2.com>
-co-authored-by: <brian.tully@hp.com>
-Co-Author: Winnie the Pooh <winnie222@openstack.org>
-
-diff_stat:
-
- 0 files changed, 0 insertions(+), 0 deletions(-)
- '''
- commits = list(self.git.log('dummy', 'dummy'))
-
- commits_expected = 8
- self.assertEqual(commits_expected, len(commits))
-
- self.assertEqual(21, commits[0]['files_changed'])
- self.assertEqual(340, commits[0]['lines_added'])
- self.assertEqual(408, commits[0]['lines_deleted'])
- self.assertEqual(['1167901'], commits[0]['bug_id'])
-
- self.assertEqual(1, commits[1]['files_changed'])
- self.assertEqual(0, commits[1]['lines_added'])
- self.assertEqual(1, commits[1]['lines_deleted'])
-
- self.assertEqual(1, commits[2]['files_changed'])
- self.assertEqual(8, commits[2]['lines_added'])
- self.assertEqual(0, commits[2]['lines_deleted'])
- self.assertEqual(set(['987654', '1234567']),
- set(commits[2]['bug_id']))
-
- self.assertEqual(0, commits[3]['files_changed'])
- self.assertEqual(0, commits[3]['lines_added'])
- self.assertEqual(0, commits[3]['lines_deleted'])
- self.assertEqual(set(['dummy:fix-me']),
- set(commits[3]['blueprint_id']))
- self.assertNotIn('coauthor', commits[3])
-
- self.assertEqual(0, commits[4]['files_changed'])
- self.assertEqual(0, commits[4]['lines_added'])
- self.assertEqual(0, commits[4]['lines_deleted'])
- self.assertNotIn('coauthor', commits[4])
-
- self.assertEqual('jeblair@openstack.org', commits[5]['author_email'])
- self.assertEqual(0, commits[5]['files_changed'])
- self.assertEqual(0, commits[5]['lines_added'])
- self.assertEqual(0, commits[5]['lines_deleted'])
-
- self.assertIn(
- {'author_name': 'Tupac Shakur',
- 'author_email': 'tupac.shakur@openstack.com'},
- commits[6]['coauthor'])
-
- self.assertIn(
- {'author_name': 'Bob Dylan',
- 'author_email': 'bob.dylan@openstack.com'},
- commits[6]['coauthor'])
-
- self.assertIn(
- {'author_name': 'Winnie the Pooh',
- 'author_email': 'winnie222@openstack.org'},
- commits[6]['coauthor'])
-
- self.assertIn(
- {'author_name': 'Tupac Shakur',
- 'author_email': 'tupac.shakur@openstack.com'},
- commits[7]['coauthor'])
-
- self.assertNotIn(
- {'author_name': 'Bob Dylan',
- 'author_email': 'bob.dylan@openstack.com'},
- commits[7]['coauthor'])
-
- self.assertNotIn(
- {'author_name': 'Anonymous',
- 'author_email': 'correct@email.com'},
- commits[7]['coauthor'])
-
- self.assertNotIn(
- {'author_name': 'Anonymous2',
- 'author_email': 'correct@email2.com'},
- commits[7]['coauthor'])
-
- self.assertIn(
- {'author_name': 'Winnie the Pooh',
- 'author_email': 'winnie222@openstack.org'},
- commits[7]['coauthor'])
-
- self.assertIn(
- {'author_name': '',
- 'author_email': 'brian.tully@hp.com'},
- commits[7]['coauthor'])
diff --git a/stackalytics/tests/unit/test_web_utils.py b/stackalytics/tests/unit/test_web_utils.py
deleted file mode 100644
index 9f60620f6..000000000
--- a/stackalytics/tests/unit/test_web_utils.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import testtools
-
-from stackalytics.dashboard import helpers
-from stackalytics.dashboard import parameters
-
-
-class TestWebUtils(testtools.TestCase):
-
- def test_make_commit_message(self):
- message = '''
-During finish_migration the manager calls initialize_connection but doesn't
-update the block_device_mapping with the potentially new connection_info
-returned.
-
-
-Fixes bug 1076801
-Change-Id: Ie49ccd2138905e178843b375a9b16c3fe572d1db'''
-
- module = 'test'
-
- record = {
- 'message': message,
- 'module': module,
- }
-
- expected = '''\
-During finish_migration the manager calls initialize_connection but doesn't \
-update the block_device_mapping with the potentially new connection_info \
-returned.
-Fixes bug \
-1076801
-''' + (
- 'Change-Id: '
- 'Ie49ccd2138905e178843b375a9b16c3fe572d1db ')
-
- observed = helpers.make_commit_message(record)
-
- self.assertEqual(expected, observed,
- 'Commit message should be processed correctly')
-
- def test_make_commit_message_blueprint_link(self):
- message = '''
-Implemented new driver for Cinder <:
-Implements Blueprint super-driver
-Change-Id: Ie49ccd2138905e178843b375a9b16c3fe572d1db'''
-
- module = 'cinder'
-
- record = {
- 'message': message,
- 'module': module,
- }
-
- expected = '''\
-Implemented new driver for Cinder <:
-Implements Blueprint ''' + (
- 'super-driver ' + '\n' +
- 'Change-Id: '
- 'Ie49ccd2138905e178843b375a9b16c3fe572d1db ')
-
- observed = helpers.make_commit_message(record)
-
- self.assertEqual(expected, observed,
- 'Commit message should be processed correctly')
-
- @mock.patch('stackalytics.dashboard.vault.get_vault')
- @mock.patch('stackalytics.dashboard.vault.get_user_from_runtime_storage')
- def test_make_page_title(self, user_patch, vault_patch):
- def _pt(id, title=None, is_openstack=True):
- return dict(id=id.lower(), title=title or id,
- parent=dict(id='openstack') if is_openstack else None)
-
- user_inst = {'user_name': 'John Doe'}
- module_inst = {'module_group_name': 'neutron'}
-
- self.assertEqual('OpenStack community contribution in all releases',
- helpers.make_page_title(
- _pt('OpenStack'), 'all', None, None, None))
- self.assertEqual('OpenStack community contribution in Havana release',
- helpers.make_page_title(
- _pt('OpenStack'), 'Havana', None, None, None))
- self.assertEqual('Mirantis contribution in OpenStack Havana release',
- helpers.make_page_title(
- _pt('Stackforge'), 'Havana', None, 'Mirantis',
- None))
- self.assertEqual('John Doe contribution in OpenStack Havana release',
- helpers.make_page_title(
- _pt('all'), 'Havana', None, None, user_inst))
- self.assertEqual(
- 'John Doe (Mirantis) contribution to neutron in OpenStack Havana '
- 'release',
- helpers.make_page_title(
- _pt('all'), 'Havana', module_inst, 'Mirantis', user_inst))
- self.assertEqual('Ansible community contribution during OpenStack '
- 'Havana release',
- helpers.make_page_title(
- _pt('Ansible', is_openstack=False),
- 'Havana', None, None, None))
- self.assertEqual('Docker community contribution',
- helpers.make_page_title(
- _pt('Docker', is_openstack=False),
- 'all', None, None, None))
-
- @mock.patch('flask.request')
- @mock.patch('stackalytics.dashboard.parameters.get_default')
- def test_parameters_get_parameter(self, get_default, flask_request):
-
- flask_request.args = mock.Mock()
- flask_request.args.get = mock.Mock(side_effect=lambda x: x)
-
- def make(values=None):
- def f(arg):
- return values.get(arg, None) if values else None
- return f
-
- get_default.side_effect = make()
- flask_request.args.get.side_effect = make({'param': 'foo'})
- self.assertEqual(['foo'], parameters.get_parameter(
- {'param': 'foo'}, 'param'))
-
- flask_request.args.get.side_effect = make({'param': 'foo'})
- self.assertEqual(['foo'], parameters.get_parameter({}, 'param'))
-
- flask_request.args.get.side_effect = make({'param': 'foo'})
- self.assertEqual([], parameters.get_parameter(
- {}, 'other', use_default=False))
-
- flask_request.args.get.side_effect = make({'params': 'foo'})
- self.assertEqual(['foo'], parameters.get_parameter(
- {}, 'param', plural_name='params'))
-
- flask_request.args.get.side_effect = make({})
- get_default.side_effect = make({'param': 'foo'})
- self.assertEqual(['foo'], parameters.get_parameter({}, 'param'))
- self.assertEqual([], parameters.get_parameter({}, 'other'))
-
- def test_filter_bug_title(self):
- bug_title = ('Bug #1459454 in Barbican: "Stored key certificate '
- 'order does not set PK on generated container"')
- expected = ('Stored key certificate order does not set PK '
- 'on generated container')
-
- actual = helpers.filter_bug_title(bug_title)
- self.assertEqual(expected, actual)
diff --git a/stackalytics/version.py b/stackalytics/version.py
deleted file mode 100644
index 9bbc003d3..000000000
--- a/stackalytics/version.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from pbr import version
-
-version_info = version.VersionInfo('stackalytics')
diff --git a/test-requirements.txt b/test-requirements.txt
deleted file mode 100644
index 5e215068a..000000000
--- a/test-requirements.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-# Hacking already pins down pep8, pyflakes and flake8
-hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
-coverage>=4.0 # Apache-2.0
-fixtures>=3.0.0 # Apache-2.0/BSD
-mock>=2.0 # BSD
-python-subunit>=0.0.18 # Apache-2.0/BSD
-sphinx>=1.5.1 # BSD
-sphinxcontrib-httpdomain # BSD
-stestr>=2.0.0 # Apache-2.0
-testtools>=1.4.0 # MIT
diff --git a/tools/check_openstackids.py b/tools/check_openstackids.py
deleted file mode 100644
index adb9b5eed..000000000
--- a/tools/check_openstackids.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This utility compares user profiles from default_data.json with the
-# OpenStackID service. For every user it prints a plus sign if at least one
-# email is registered in the OpenStackID service, and a dollar sign if the
-# user's affiliation matches. At the end the overall stats are printed. The
-# full comparison is written into a yaml file.
-
-from __future__ import print_function
-
-import functools
-import json
-import sys
-import time
-
-from oslo_config import cfg
-import six
-import yaml
-
-from stackalytics.processor import openstackid_utils
-from stackalytics.processor import user_processor
-from stackalytics.processor import utils
-
-
-def _read_raw_file(file_name):
- if six.PY3:
- opener = functools.partial(open, encoding='utf8')
- else:
- opener = open
- with opener(file_name, 'r') as content_file:
- return content_file.read()
-
-
-def _read_file(file_name):
- return json.loads(_read_raw_file(file_name))
-
-
-def get_domains_index(companies):
- domains_index = {}
- for company in companies:
- for domain in company['domains']:
- domains_index[domain] = company['company_name']
-
- if 'aliases' in company:
- for alias in company['aliases']:
- normalized_alias = utils.normalize_company_name(alias)
- domains_index[normalized_alias] = company['company_name']
- normalized_company_name = utils.normalize_company_name(
- company['company_name'])
- domains_index[normalized_company_name] = company['company_name']
-
- return domains_index
-
-
-def flatten_companies(cs):
- return [{c['company_name']: c['end_date'] or 0} for c in cs]
-
-
-def main():
- default_data = _read_file('etc/default_data.json')
- users = default_data['users']
- domains_index = get_domains_index(default_data['companies'])
-
- user_maps = 0
- email_maps_to_openstack_id = 0
- email_does_not_map_to_openstack_id = 0
- users_whos_email_does_not_map = 0
- name_differs = 0
- users_with_companies_match = 0
-
- recs = []
-
- for idx, user in enumerate(users):
- name = user['user_name']
- affiliation = flatten_companies(user['companies'])
-
- print(idx, name, end='')
-
- ce = []
- umn = 0
- companies_match = True
-
- for email in user['emails']:
- p = openstackid_utils.user_profile_by_email(email)
-
- if p:
- mapped_companies = user_processor.resolve_companies_aliases(
- domains_index, p['companies'])
- email_maps_to_openstack_id += 1
-
- if p['user_name'] != name:
- name_differs += 1
-
- ce.append({email: [p['user_name'],
- flatten_companies(mapped_companies)]})
-
- f = False
- if len(user['companies']) == 1:
- dd_c = user['companies'][0]['company_name']
- mc = [c['company_name'] for c in mapped_companies
- if c['company_name'] != user_processor.INDEPENDENT]
- if len(mc) == 1:
- if dd_c == mc[0]:
- f = True
- companies_match = companies_match and f
- else:
- email_does_not_map_to_openstack_id += 1
- umn += 1
-
- mark = ''
-
- if ce:
- recs.append([name, affiliation, ce])
- user_maps += 1
- mark = '+'
-
- if umn:
- users_whos_email_does_not_map += 1
-
- if ce and companies_match:
- users_with_companies_match += 1
- mark += '$'
-
- print('', mark)
- time.sleep(1.1) # avoid throttling
-
- recs.sort(key=lambda x: x[0])
-
- meta = {
- 'Default data profiles': len(users),
- 'Profiles mapped': user_maps,
- 'Profiles NOT mapped': len(users) - user_maps,
- 'Profiles with emails NOT mapped': users_whos_email_does_not_map,
- 'Emails mapped': email_maps_to_openstack_id,
- 'Emails NOT mapped': email_does_not_map_to_openstack_id,
- 'Names differ': name_differs,
- 'Users with companies MATCH': users_with_companies_match,
- }
- print()
- yaml.safe_dump(meta, sys.stdout, default_flow_style=False)
-
- with open('profile_mapping.yaml', 'w') as fd:
- yaml.safe_dump(recs, fd, default_flow_style=False)
-
-
-if __name__ == '__main__':
- opts = [
- cfg.IntOpt('read-timeout', default=20)
- ]
- cfg.CONF.register_opts(opts)
- cfg.CONF(project='stackalytics')
-
- main()
diff --git a/tools/check_user_profiles.py b/tools/check_user_profiles.py
deleted file mode 100644
index 421b27eee..000000000
--- a/tools/check_user_profiles.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import memcache
-
-
-def check(expected, actual):
- if expected != actual:
- print('Expected: %s\nActual: %s' % (expected, actual))
-
-
-def main():
- m = memcache.Client(['localhost:11211'])
- count = m.get('user:count') + 1
- users = [m.get('user:%d' % seq) for seq in range(count)]
- users = [u for u in users if u]
-
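- # every lookup key (user_id, launchpad_id, gerrit_id, email) must
- # resolve to the same profile as the sequential 'user:N' record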
- for u in users:
- user_id = u.get('user_id')
- lp = u.get('launchpad_id')
- g = u.get('gerrit_id')
- emails = u.get('emails')
-
- if user_id:
- check(u, m.get('user:%s' % user_id.encode('utf8')))
-
- if lp:
- check(u, m.get('user:%s' % lp.encode('utf8')))
-
- if g:
- check(u, m.get('user:gerrit:%s' % g.encode('utf8')))
-
- if emails:
- for e in emails:
- check(u, m.get('user:%s' % e.encode('utf8')))
-
-
-if __name__ == '__main__':
- main()
diff --git a/tools/cli_auto_doc.py b/tools/cli_auto_doc.py
deleted file mode 100644
index 3fa4e46f2..000000000
--- a/tools/cli_auto_doc.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) 2015 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-
-try:
- import ConfigParser as configparser
-except ImportError:
- import configparser
-
-
-def split_multiline(value):
- value = [element for element in
- (line.strip() for line in value.split('\n'))
- if element]
- return value
-
-
-def get_entry_points(config):
- if 'entry_points' not in config:
- return {}
- return dict((option, split_multiline(value))
- for option, value in config['entry_points'].items())
-
-
-def make(cfg, dest):
- parser = configparser.RawConfigParser()
- parser.read(cfg)
- config = {}
- for section in parser.sections():
- config[section] = dict(parser.items(section))
- entry_points = get_entry_points(config)
-
- console_scripts = entry_points.get('console_scripts')
- if console_scripts:
- for item in console_scripts:
- tool = item.split('=')[0].strip()
- print('Running %s' % tool)
- os.system('%(tool)s --help > %(dest)s/%(tool)s.txt' %
- dict(tool=tool, dest=dest))
-
-
-if len(sys.argv) < 2:
- print('Usage: cli_auto_doc <dest folder>')
- sys.exit(1)
-
-
-print('Generating docs from help to console tools')
-make(cfg='setup.cfg', dest=sys.argv[1])
diff --git a/tools/install_venv.sh b/tools/install_venv.sh
deleted file mode 100755
index 7acaf8d5a..000000000
--- a/tools/install_venv.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-virtualenv .venv
-./tools/with_venv.sh pip install --upgrade -r requirements.txt
diff --git a/tools/with_venv.sh b/tools/with_venv.sh
deleted file mode 100755
index c8d2940fc..000000000
--- a/tools/with_venv.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-TOOLS=`dirname $0`
-VENV=$TOOLS/../.venv
-source $VENV/bin/activate && "$@"
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 56db97b76..000000000
--- a/tox.ini
+++ /dev/null
@@ -1,42 +0,0 @@
-[tox]
-envlist = py36,py35,py27,pep8,docs
-minversion = 2.0
-skipsdist = True
-
-[testenv]
-usedevelop = True
-install_command = pip install -U {opts} {packages}
-setenv =
- VIRTUAL_ENV={envdir}
-deps = -r{toxinidir}/test-requirements.txt
-commands = stestr run --slowest {posargs}
-
-[testenv:pep8]
-commands = flake8
-
-[testenv:venv]
-commands = {posargs}
-
-[testenv:cover]
-setenv =
- PYTHON=coverage run --source $project --parallel-mode
-commands =
- stestr run {posargs}
- coverage combine
- coverage html -d cover
- coverage xml -o cover/coverage.xml
-
-[testenv:genconfig]
-commands =
- oslo-config-generator --config-file=config-generator.conf
- python tools/cli_auto_doc.py doc/source/tools
-
-[testenv:docs]
-commands = python setup.py build_sphinx
-
-[flake8]
-# E123, E125 skipped as they are invalid PEP-8
-ignore =
-show-source = true
-builtins = _
-exclude=.venv*,.git,.tox,dist,doc,*lib/python*,*egg,tools,build