diff --git a/.gitignore b/.gitignore index 9e140102f..f6b110e89 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,19 @@ +# +# Copyright 2016 LinkedIn Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + # general *~ *.log @@ -32,5 +48,13 @@ target logs RUNNING_PID +# macOS +.DS_Store *.jar + +#web +public/assets/ember/ +public/assets/fonts/ +web/bower_components/ +web/node_modules/ diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..1f2e642ad --- /dev/null +++ b/.travis.yml @@ -0,0 +1,11 @@ +language: scala +sudo: false +jdk: + - oraclejdk8 + - oraclejdk7 + +# only build PRs and master (not all branch pushes) +branches: + only: + - master + diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..7a4a3ea24 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 000000000..fc2cea466
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,115 @@
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+------------------------------------------------------------------------------
+
+This product automatically loads third party code from an external repository (configurable) using Play's build system.
+Such third party code is subject to license terms other than those set forth above. Please review the complete list of
+dependencies for applicable license terms.
+
+In addition, such third party code may also depend on and load multiple tiers of dependencies. Please review the
+applicable licenses of the additional dependencies.
+
+------------------------------------------------------------------------------
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/)
+
+This product includes/uses Apache Commons IO (https://commons.apache.org/proper/commons-io/)
+Copyright (c) 2002-2016 The Apache Software Foundation
+License: Apache 2.0
+
+This product includes/uses Gson (https://github.com/google/gson)
+Copyright (C) 2008 Google Inc.
+License: Apache 2.0
+
+This product includes/uses Jsoup (https://github.com/jhy/jsoup)
+Copyright (c) 2009 - 2015 Jonathan Hedley (jonathan@hedley.net)
+License: The MIT License (https://github.com/jhy/jsoup/blob/master/LICENSE)
+
+This product includes/uses Guava (https://github.com/google/guava)
+License: Apache 2.0
+
+This product includes/uses Jackson Databind (http://github.com/FasterXML/jackson)
+License: Apache 2.0
+
+This product includes/uses Apache Hadoop (http://hadoop.apache.org/)
+Notice: https://github.com/apache/hadoop/blob/trunk/NOTICE.txt
+License: Apache 2.0
+
+This product includes/uses Apache Spark (http://spark.apache.org/)
+Notice: https://github.com/apache/spark/blob/master/NOTICE
+License: Apache 2.0 (https://github.com/apache/spark/blob/master/LICENSE)
+
+This product includes/uses Play Framework (https://www.playframework.com/)
+Copyright (C) 2009-2016 Typesafe Inc. (http://www.typesafe.com)
+License: Apache 2.0
+
+This product requires the following Play dependencies in addition to the core Play library:
+javaEbean, the Ebean plugin for Java.
+javaJdbc, the Java database API.
+
+This product includes/uses JMockit (http://jmockit.org/)
+Notice: https://github.com/jmockit/jmockit1/blob/master/NOTICE.txt
+License: The MIT License (https://github.com/jmockit/jmockit1/blob/master/LICENSE.txt)
+
+This product includes/uses Mockito (http://mockito.org)
+Copyright (c) 2007 Mockito contributors
+License: The MIT License (https://github.com/mockito/mockito/blob/master/LICENSE)
+
+This product includes/uses Metrics (http://metrics.dropwizard.io/)
+Copyright 2010-2014 Coda Hale, Yammer Inc.
+License: Apache 2.0 (https://github.com/dropwizard/metrics/blob/master/LICENSE)
+
+This product includes/uses ember-cli (https://ember-cli.com/)
+Copyright (c) 2013-2016 Stefan Penner, Robert Jackson and ember-cli contributors
+License: The MIT License (MIT) (https://github.com/ember-cli/ember-cli/blob/master/LICENSE)
+
+This product includes/uses bower (https://bower.io/)
+Copyright (c) 2016 Twitter and other contributors
+License: The MIT License (https://github.com/bower/bower/blob/master/LICENSE)
+
+This product includes/uses broccoli-asset-rev
+Copyright (c) 2014 Rick Harrison
+License: The MIT License (MIT) (https://github.com/rickharrison/broccoli-asset-rev/blob/master/LICENSE)
+
+------------------------------------------------------------------------------
+Attribution for JavaScript Libraries
+------------------------------------------------------------------------------
+
+jQuery (http://jquery.com/)
+Copyright 2005, 2014 jQuery Foundation, Inc. and other contributors
+License: MIT License (http://jquery.org/license)
+
+jQuery Deserialize (https://github.com/kflorence/jquery-deserialize/)
+Copyright (c) 2015 Kyle Florence
+License: Dual licensed under the MIT and GPLv2 licenses.
+
+Bootstrap (http://getbootstrap.com/)
+Copyright 2011-2016 Twitter, Inc.
+License: MIT License (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+
+Bootstrap Datepicker (https://github.com/eternicode/bootstrap-datepicker)
+Copyright 2012 Stefan Petre, Improvements by Andrew Rowls
+License: Apache 2.0
+
+D3.js (http://d3js.org)
+Copyright (c) 2010-2016, Michael Bostock
+License: BSD 3-Clause (https://github.com/mbostock/d3/blob/master/LICENSE)
+
+ember.js (http://emberjs.com/)
+Copyright (c) 2016 Yehuda Katz, Tom Dale and Ember.js contributors
+License: The MIT License (https://github.com/emberjs/ember.js/blob/master/LICENSE)
+
+------------------------------------------------------------------------------
+
diff --git a/README.md b/README.md
index 172604f50..7283dbba7 100644
--- a/README.md
+++ b/README.md
@@ -1,61 +1,52 @@
-## Dr Elephant
+# Dr. Elephant
 
-### Compiling & testing locally
+[![Build Status](https://api.travis-ci.org/linkedin/dr-elephant.svg)](https://travis-ci.org/linkedin/dr-elephant/)
+[![Join the chat at https://gitter.im/linkedin/dr-elephant](https://badges.gitter.im/linkedin/dr-elephant.svg)](https://gitter.im/linkedin/dr-elephant?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
-* To be able to build & run the application, download and install [Play framework 2.2.2](http://downloads.typesafe.com/play/2.2.2/play-2.2.2.zip).
-* The pre-installed play command on our boxes will not work as it is configured to look at LinkedIns repos
-* If this is your first time working with Dr. Elephant, take the deployed Hadoop jars and put them in the /lib directory:
-    scp eat1-magicgw01.grid.linkedin.com:/export/apps/hadoop/latest/hadoop-core-1.2.1-p3.jar ./lib/.
+
 
-* To build and run the application in dev mode, run from command line "play run" in the project directory.
-* There is need to investigate the framework to see how one can add parameters to the classpath in dev mode.
+**Dr. Elephant** is a performance monitoring and tuning tool for Hadoop and Spark. It automatically gathers all the metrics, runs analysis on them, and presents them in a simple way for easy consumption. Its goal is to improve developer productivity and increase cluster efficiency by making it easier to tune the jobs. It analyzes the Hadoop and Spark jobs using a set of pluggable, configurable, rule-based heuristics that provide insights on how a job performed, and then uses the results to make suggestions about how to tune the job to make it perform more efficiently.
 
-### Deployment
+## Documentation
 
-* To create a deployment package, use "play dist" to create a zip package, or use "play universal:package-zip-tarball" to create a tarball
-* To run the deployed package with Hadoop properly, some changes needs to be added to the startup script located at ./bin/dr-elephant
+For more information on Dr. Elephant, check the wiki pages [here](https://github.com/linkedin/dr-elephant/wiki).
 
-* in the classpath ("declare -r app\_classpath=...") , add to the end of the string, before the end quotes
+For quick setup instructions: [Click here](https://github.com/linkedin/dr-elephant/wiki/Quick-Setup-Instructions)
 
-    :$HADOOP_HOME/*:$HADOOP_HOME/lib/*:$HADOOP_HOME/conf
+Developer guide: [Click here](https://github.com/linkedin/dr-elephant/wiki/Developer-Guide)
 
-* after the next line ("addJava ... ;"), add new line
+Administrator guide: [Click here](https://github.com/linkedin/dr-elephant/wiki/Administrator-Guide)
 
-    addJava "-Djava.library.path=$HADOOP_HOME/lib/native/Linux-amd64-64"
+User guide: [Click here](https://github.com/linkedin/dr-elephant/wiki/User-Guide)
 
-### New Deployment (All previous instructions are deprecated!)
+Engineering Blog: [Click here](https://engineering.linkedin.com/blog/2016/04/dr-elephant-open-source-self-serve-performance-tuning-hadoop-spark)
 
-* ./compile.sh will create two zips under 'dist' dir which can deploy with h1 and h2 directly without changing classpath
-* When test dr.e in hadoop2.x locally, HADOOP_HOME and HADOOP_CONF_DIR need to be set properly
-* Upon deployment on cluster, we can specify keytab and database location at runtime: ./bin/dr-elephant -Dhttp.port=xxxx -Dkeytab.user="xxxx" -Dkeytab.location="xxxx" -Ddb.default.url="jdbc:mysql://xxxx" -Ddb.default.user=xxxx -Ddb.default.password=xxxx so that we don't have to change application.conf at compile time
+## Mailing List & GitHub Issues
+Google Groups mailing list: [Click here](https://groups.google.com/forum/#!forum/dr-elephant-users)
+GitHub issues: [Click here](https://github.com/linkedin/dr-elephant/issues)
 
-### DB Schema evolutions
+## Meetings
 
-When the schema in the model package changes, play will need to be ran to automatically apply the evolution.
+We have scheduled a weekly Dr. Elephant meeting for interested developers and users to discuss future plans for Dr. Elephant. Please [click here](https://github.com/linkedin/dr-elephant/issues/209) for details.
 
-* There is a problem with Ebean where it does not support something like @Index to generate indices for columns of interest
-* So what we did to work around this is to manually add indices into the sql script.
-* To do this, we needed to prevent the automatically generated sql to overwrite our modified sql.
-* The evolution sql file must be changed (by moving or removing the header "To stop Ebean DDL generation, remove this comment and start using Evolutions") to make sure it does not automatically generate new sql.
-* To re-create the sql file from a new schema in code:
-  * Backup the file at ./conf/evolutions/default/1.sql
-  * Remove the file
-  * Run play in debug mode and browse the page. This causes EBean to generate the new sql file, and automatically apply the evolution.
-  * Copy over the indices from the old 1.sql file
-  * Remove the header in the sql file so it does not get overwritten
-  * Browse the page again to refresh the schema to add the indices.
+## How to Contribute?
 
-### Running on the cluster
+Check this [link](https://github.com/linkedin/dr-elephant/wiki/How-to-Contribute%3F).
 
-* SSH into the machine
-* sudo as elephant
-* go to /export/apps/elephant/
-* To start: ./run.sh
-* To kill: ./kill.sh
-* To deploy new version:
-  * scp machine:location-to-drelephant.zip /export/apps/elephant/
-  * ./kill.sh
-  * unzip dr-elephant-0.1-SNAPSHOT.zip
-  * ./run.sh
+## License
+
+  Copyright 2016 LinkedIn Corp.
+
+  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+  use this file except in compliance with the License. You may obtain a copy of
+  the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and limitations under
+  the License.
diff --git a/app-conf/AggregatorConf.xml b/app-conf/AggregatorConf.xml
new file mode 100644
index 000000000..23586d587
--- /dev/null
+++ b/app-conf/AggregatorConf.xml
@@ -0,0 +1,43 @@
+<!--
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+
+<aggregators>
+  <aggregator>
+    <applicationtype>mapreduce</applicationtype>
+    <classname>com.linkedin.drelephant.mapreduce.MapReduceMetricsAggregator</classname>
+  </aggregator>
+  <aggregator>
+    <applicationtype>spark</applicationtype>
+    <classname>com.linkedin.drelephant.spark.SparkMetricsAggregator</classname>
+    <params>
+      <allocated_memory_waste_buffer_percentage>0.5</allocated_memory_waste_buffer_percentage>
+    </params>
+  </aggregator>
+</aggregators>
diff --git a/app-conf/FetcherConf.xml b/app-conf/FetcherConf.xml
new file mode 100644
index 000000000..d06ce8bf7
--- /dev/null
+++ b/app-conf/FetcherConf.xml
@@ -0,0 +1,68 @@
+<!--
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+
+<fetchers>
+  <fetcher>
+    <applicationtype>mapreduce</applicationtype>
+    <classname>com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2</classname>
+    <params>
+      <sampling_enabled>false</sampling_enabled>
+    </params>
+  </fetcher>
+  <fetcher>
+    <applicationtype>spark</applicationtype>
+    <classname>com.linkedin.drelephant.spark.fetchers.SparkFetcher</classname>
+  </fetcher>
+</fetchers>
diff --git a/app-conf/GeneralConf.xml b/app-conf/GeneralConf.xml
new file mode 100644
index 000000000..944f10d0e
--- /dev/null
+++ b/app-conf/GeneralConf.xml
@@ -0,0 +1,48 @@
+<!--
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+
+<configuration>
+  <property>
+    <name>drelephant.analysis.thread.count</name>
+    <value>3</value>
+    <description>Number of threads to analyze the completed jobs</description>
+  </property>
+  <property>
+    <name>drelephant.analysis.fetch.interval</name>
+    <value>60000</value>
+    <description>Interval between fetches in milliseconds</description>
+  </property>
+  <property>
+    <name>drelephant.analysis.retry.interval</name>
+    <value>60000</value>
+    <description>Interval between retries in milliseconds</description>
+  </property>
+  <property>
+    <name>drelephant.application.search.match.partial</name>
+    <value>true</value>
+    <description>If this property is "false", search will only make exact matches</description>
+  </property>
+</configuration>
diff --git a/app-conf/HeuristicConf.xml b/app-conf/HeuristicConf.xml
new file mode 100644
index 000000000..21a00168a
--- /dev/null
+++ b/app-conf/HeuristicConf.xml
@@ -0,0 +1,197 @@
+<!--
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+
+<heuristics>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Mapper Data Skew</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpMapperDataSkew</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Mapper GC</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpGC</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Mapper Time</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpMapperTime</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Mapper Speed</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.MapperSpeedHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpMapperSpeed</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Mapper Spill</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.MapperSpillHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpMapperSpill</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Mapper Memory</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpMapperMemory</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Reducer Data Skew</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.ReducerDataSkewHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpReducerDataSkew</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Reducer GC</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.ReducerGCHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpGC</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Reducer Time</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.ReducerTimeHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpReducerTime</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Reducer Memory</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpReducerMemory</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Shuffle &amp; Sort</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.ShuffleSortHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpShuffleSort</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Exception</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.ExceptionHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpException</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>mapreduce</applicationtype>
+    <heuristicname>Distributed Cache Limit</heuristicname>
+    <classname>com.linkedin.drelephant.mapreduce.heuristics.DistributedCacheLimitHeuristic</classname>
+    <viewname>views.html.help.mapreduce.helpDistributedCacheLimit</viewname>
+    <params>
+      <distributed.cache.file.size.limit>500000000</distributed.cache.file.size.limit>
+    </params>
+  </heuristic>
+  <heuristic>
+    <applicationtype>spark</applicationtype>
+    <heuristicname>Spark Configuration</heuristicname>
+    <classname>com.linkedin.drelephant.spark.heuristics.ConfigurationHeuristic</classname>
+    <viewname>views.html.help.spark.helpConfigurationHeuristic</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>spark</applicationtype>
+    <heuristicname>Spark Executor Metrics</heuristicname>
+    <classname>com.linkedin.drelephant.spark.heuristics.ExecutorsHeuristic</classname>
+    <viewname>views.html.help.spark.helpExecutorsHeuristic</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>spark</applicationtype>
+    <heuristicname>Spark Job Metrics</heuristicname>
+    <classname>com.linkedin.drelephant.spark.heuristics.JobsHeuristic</classname>
+    <viewname>views.html.help.spark.helpJobsHeuristic</viewname>
+  </heuristic>
+  <heuristic>
+    <applicationtype>spark</applicationtype>
+    <heuristicname>Spark Stage Metrics</heuristicname>
+    <classname>com.linkedin.drelephant.spark.heuristics.StagesHeuristic</classname>
+    <viewname>views.html.help.spark.helpStagesHeuristic</viewname>
+  </heuristic>
+</heuristics>
diff --git a/app-conf/JobTypeConf.xml b/app-conf/JobTypeConf.xml
new file mode 100644
index 000000000..8a4cae3eb
--- /dev/null
+++ b/app-conf/JobTypeConf.xml
@@ -0,0 +1,77 @@
+<!--
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+
+<jobTypes>
+  <jobType>
+    <name>Spark</name>
+    <applicationtype>spark</applicationtype>
+    <conf>spark.app.id</conf>
+  </jobType>
+  <jobType>
+    <name>Pig</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>pig.script</conf>
+  </jobType>
+  <jobType>
+    <name>Hive</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>hive.mapred.mode</conf>
+  </jobType>
+  <jobType>
+    <name>OozieLauncher</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>oozie.launcher.action.main.class</conf>
+  </jobType>
+  <jobType>
+    <name>Cascading</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>cascading.app.frameworks</conf>
+  </jobType>
+  <jobType>
+    <name>Voldemort</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>mapred.reducer.class</conf>
+    <value>voldemort.store.readonly.mr.*</value>
+  </jobType>
+  <jobType>
+    <name>Kafka</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>kafka.url</conf>
+  </jobType>
+  <jobType>
+    <name>HadoopJava</name>
+    <applicationtype>mapreduce</applicationtype>
+    <conf>mapred.child.java.opts</conf>
+    <isDefault/>
+  </jobType>
+</jobTypes>
diff --git a/app-conf/SchedulerConf.xml b/app-conf/SchedulerConf.xml
new file mode 100644
index 000000000..3bfd26cd7
--- /dev/null
+++ b/app-conf/SchedulerConf.xml
@@ -0,0 +1,72 @@
+<!--
+Copyright 2016 LinkedIn Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+
+<schedulers>
+  <scheduler>
+    <name>airflow</name>
+    <classname>com.linkedin.drelephant.schedulers.AirflowScheduler</classname>
+    <params>
+      <airflowbaseurl>http://localhost:8000</airflowbaseurl>
+    </params>
+  </scheduler>
+  <scheduler>
+    <name>azkaban</name>
+    <classname>com.linkedin.drelephant.schedulers.AzkabanScheduler</classname>
+  </scheduler>
+  <scheduler>
+    <name>oozie</name>
+    <classname>com.linkedin.drelephant.schedulers.OozieScheduler</classname>
+    <params>
+      <oozie_api_url>http://localhost:11000/oozie</oozie_api_url>
+    </params>
+  </scheduler>
+  <scheduler>
+    <name>no_scheduler</name>
+    <classname>com.linkedin.drelephant.schedulers.NoScheduler</classname>
+  </scheduler>
+</schedulers>
diff --git a/app-conf/elephant.conf b/app-conf/elephant.conf
new file mode 100644
index 000000000..2ab7852c2
--- /dev/null
+++ b/app-conf/elephant.conf
@@ -0,0 +1,55 @@
+# Play application server port
+port=8080
+
+# Secret key
+# The secret key is used to secure cryptographic functions.
+# If you deploy your application to several instances be sure to use the same key!
+# You can set the key using the env variable APPLICATION_SECRET or set it here.
+# Setting it here has higher precedence than the env variable.
+# application_secret="changeme"
+
+# Database configuration
+db_url=localhost
+db_name=drelephant
+db_user=root
+db_password=""
+
+# Enable web analytics for the application.
+# By default analytics is not turned on. Set this property
+# to true and paste the javascript snippet into 'public/analytics/track.js' for
+# enabling web analytics for the application. You may configure an analytics application
+# like piwik. More information on piwik at piwik.org
+enable_analytics=false
+
+# Set the keytab user and the path to the keytab file if security is enabled.
+# keytab_user=""
+# keytab_location=""
+
+# Additional Configuration
+# Check https://www.playframework.com/documentation/2.2.x/ProductionConfiguration
+jvm_args="-Devolutionplugin=enabled -DapplyEvolutions.default=true"
+
+# Property enables dropwizard metrics for the application.
+# More info on Dropwizard metrics at http://metrics.dropwizard.io
+# By default metrics are turned on, which provides several useful stats for
+# the application. The following endpoints can be queried once the application is up.
+# /ping
+# /healthcheck
+# /metrics
+metrics=true
+
+#
+# Property enables an agent jar to be loaded along with the Dr. Elephant application.
+# The intention with this property is to have the agent publish metrics to other +# applications, although it can do anything else. This property is disabled by default +# and users wishing to make use of it should provide their own implementation of the agent. +# +# More information on writing a jvm agent can be found under the following. +# https://docs.oracle.com/javase/8/docs/api/java/lang/instrument/package-summary.html +# +# Sample configuration of this property is shown below. +# metrics_agent_jar="-javaagent:lib/your_agent.jar" +# +# Sample configuration of the agent with additional options. +# metrics_agent_jar="-javaagent:lib/your_agent.jar=app-name=dr-elephant,app-host=foo" + diff --git a/app/Global.java b/app/Global.java index 02b04175d..9b346e0b5 100644 --- a/app/Global.java +++ b/app/Global.java @@ -1,3 +1,19 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + import com.linkedin.drelephant.DrElephant; import com.sun.security.sasl.util.AbstractSaslImpl; @@ -11,27 +27,30 @@ import java.util.logging.Level; +/** + * This class manages all the global settings + */ public class Global extends GlobalSettings { - DrElephant drElephant; + DrElephant _drElephant; public void onStart(Application app) { - Logger.info("Application has started"); + Logger.info("Starting Application..."); fixJavaKerberos(); try { - drElephant = new DrElephant(); - drElephant.start(); + _drElephant = new DrElephant(); + _drElephant.start(); } catch (IOException e) { Logger.error("Application start failed...", e); } } public void onStop(Application app) { - Logger.info("Application shutdown..."); - if (drElephant != null) { - drElephant.kill(); + Logger.info("Stopping application..."); + if (_drElephant != null) { + _drElephant.kill(); } } diff --git a/app/com/linkedin/drelephant/DrElephant.java b/app/com/linkedin/drelephant/DrElephant.java index c471029cf..f0de35a02 100644 --- a/app/com/linkedin/drelephant/DrElephant.java +++ b/app/com/linkedin/drelephant/DrElephant.java @@ -1,23 +1,42 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package com.linkedin.drelephant; import java.io.IOException; +/** + * The main class which starts Dr. 
Elephant + */ public class DrElephant extends Thread { - private ElephantRunner elephant; + private ElephantRunner _elephant; public DrElephant() throws IOException { - elephant = new ElephantRunner(); + _elephant = new ElephantRunner(); } @Override public void run() { - elephant.run(); + _elephant.run(); } public void kill() { - if (elephant != null) { - elephant.kill(); + if (_elephant != null) { + _elephant.kill(); } } } diff --git a/app/com/linkedin/drelephant/ElephantAnalyser.java b/app/com/linkedin/drelephant/ElephantAnalyser.java deleted file mode 100644 index 6ad090dfa..000000000 --- a/app/com/linkedin/drelephant/ElephantAnalyser.java +++ /dev/null @@ -1,83 +0,0 @@ -package com.linkedin.drelephant; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.analysis.heuristics.*; -import com.linkedin.drelephant.hadoop.HadoopJobData; - -import model.JobType; - -public class ElephantAnalyser { - public static final String NO_DATA = "No Data Received"; - private static final ElephantAnalyser instance = new ElephantAnalyser(); - - private HeuristicResult nodata; - private List heuristics = new ArrayList(); - public List heuristicNames = new ArrayList(); - - public ElephantAnalyser() { - nodata = new HeuristicResult(NO_DATA, Severity.LOW); - addHeuristic(new MapperDataSkewHeuristic()); - addHeuristic(new ReducerDataSkewHeuristic()); - addHeuristic(new MapperInputSizeHeuristic()); - addHeuristic(new MapperSpeedHeuristic()); - addHeuristic(new ReducerTimeHeuristic()); - addHeuristic(new ShuffleSortHeuristic()); - } - - public void addHeuristic(Heuristic heuristic) { - heuristics.add(heuristic); - heuristicNames.add(heuristic.getHeuristicName()); - } - - public HeuristicResult[] analyse(HadoopJobData data) { - if (data.getMapperData().length == 0 && data.getReducerData().length == 0) { - return new HeuristicResult[]{nodata}; - } - - List results = new ArrayList(); - for (Heuristic heuristic : heuristics) { - results.add(heuristic.apply(data)); - } - return results.toArray(new HeuristicResult[results.size()]); - } - - public JobType getJobType(HadoopJobData data) { - String pigVersion = data.getJobConf().getProperty("pig.version"); - if (pigVersion != null && !pigVersion.isEmpty()) { - return JobType.PIG; - } - String hiveMapredMode = data.getJobConf().getProperty("hive.mapred.mode"); - if (hiveMapredMode != null && !hiveMapredMode.isEmpty()) { - return JobType.HIVE; - } - - return JobType.HADOOPJAVA; - } - - - public Map getMetaUrls(HadoopJobData data) { - Map result = new HashMap(); - final String prefix = "meta.url."; - Properties jobConf = data.getJobConf(); - for (Map.Entry entry : jobConf.entrySet()) { - if (entry.getKey().toString().startsWith(prefix)) { - String key = entry.getKey().toString(); - String value = jobConf.getProperty(key); - result.put(key.substring(prefix.length()), value); - } - } - return result; - } - - public static ElephantAnalyser instance() { - return instance; - } -} diff --git a/app/com/linkedin/drelephant/ElephantContext.java b/app/com/linkedin/drelephant/ElephantContext.java new file mode 100644 index 000000000..fe6ff3026 --- /dev/null +++ b/app/com/linkedin/drelephant/ElephantContext.java @@ -0,0 +1,412 @@ +/* + * Copyright 2016 LinkedIn Corp. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Sets;
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.analysis.ElephantFetcher;
+import com.linkedin.drelephant.analysis.HadoopApplicationData;
+import com.linkedin.drelephant.analysis.HadoopMetricsAggregator;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.JobType;
+import com.linkedin.drelephant.configurations.aggregator.AggregatorConfiguration;
+import com.linkedin.drelephant.configurations.aggregator.AggregatorConfigurationData;
+import com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration;
+import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfiguration;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import com.linkedin.drelephant.configurations.jobtype.JobTypeConfiguration;
+import com.linkedin.drelephant.mapreduce.MapReduceMetricsAggregator;
+import com.linkedin.drelephant.util.Utils;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.w3c.dom.Document;
+import play.api.templates.Html;
+
+
+/**
+ * This is a general singleton instance that provides globally accessible resources.
+ *
+ * It is not mandatory for an AnalysisPromise implementation to leverage this instance, but this context provides
+ * a way for Promises to access shared objects (singletons, thread-local variables, etc.).
+ */
+public class ElephantContext {
+  private static final Logger logger = Logger.getLogger(ElephantContext.class);
+  private static ElephantContext INSTANCE;
+
+  private static final String AGGREGATORS_CONF = "AggregatorConf.xml";
+  private static final String FETCHERS_CONF = "FetcherConf.xml";
+  private static final String HEURISTICS_CONF = "HeuristicConf.xml";
+  private static final String JOB_TYPES_CONF = "JobTypeConf.xml";
+  private static final String GENERAL_CONF = "GeneralConf.xml";
+
+  private final Map<String, List<String>> _heuristicGroupedNames = new HashMap<String, List<String>>();
+  private List<HeuristicConfigurationData> _heuristicsConfData;
+  private List<FetcherConfigurationData> _fetchersConfData;
+  private Configuration _generalConf;
+  private List<AggregatorConfigurationData> _aggregatorConfData;
+
+  private final Map<String, ApplicationType> _nameToType = new HashMap<String, ApplicationType>();
+  private final Map<ApplicationType, List<Heuristic>> _typeToHeuristics = new HashMap<ApplicationType, List<Heuristic>>();
+  private final Map<ApplicationType, HadoopMetricsAggregator> _typeToAggregator = new HashMap<ApplicationType, HadoopMetricsAggregator>();
+  private final Map<ApplicationType, ElephantFetcher> _typeToFetcher = new HashMap<ApplicationType, ElephantFetcher>();
+  private final Map<String, Html> _heuristicToView = new HashMap<String, Html>();
+  private Map<ApplicationType, List<JobType>> _appTypeToJobTypes = new HashMap<ApplicationType, List<JobType>>();
+
+  public static void init() {
+    INSTANCE = new ElephantContext();
+  }
+
+  public static ElephantContext instance() {
+    if (INSTANCE == null) {
+      INSTANCE = new ElephantContext();
+    }
+    return INSTANCE;
+  }
+
+  // private on purpose
+  private ElephantContext() {
+    loadConfiguration();
+  }
+
+  private void loadConfiguration() {
+    loadAggregators();
+    loadFetchers();
+    loadHeuristics();
+    loadJobTypes();
+
+    loadGeneralConf();
+
+    // It is important to configure supported types in the LAST step so that we have information from all
+    // configurable components.
+    configureSupportedApplicationTypes();
+  }
+
+  private void loadAggregators() {
+    Document document = Utils.loadXMLDoc(AGGREGATORS_CONF);
+
+    _aggregatorConfData = new AggregatorConfiguration(document.getDocumentElement()).getAggregatorsConfigurationData();
+    for (AggregatorConfigurationData data : _aggregatorConfData) {
+      try {
+        Class aggregatorClass = Class.forName(data.getClassName());
+        Object instance = aggregatorClass.getConstructor(AggregatorConfigurationData.class).newInstance(data);
+        if (!(instance instanceof HadoopMetricsAggregator)) {
+          throw new IllegalArgumentException(
+              "Class " + aggregatorClass.getName() + " is not an implementation of " + HadoopMetricsAggregator.class.getName());
+        }
+
+        ApplicationType type = data.getAppType();
+        if (_typeToAggregator.get(type) == null) {
+          _typeToAggregator.put(type, (HadoopMetricsAggregator) instance);
+        }
+
+        logger.info("Load Aggregator : " + data.getClassName());
+      } catch (ClassNotFoundException e) {
+        throw new RuntimeException("Could not find class " + data.getClassName(), e);
+      } catch (InstantiationException e) {
+        throw new RuntimeException("Could not instantiate class " + data.getClassName(), e);
+      } catch (IllegalAccessException e) {
+        throw new RuntimeException("Could not access constructor for class " + data.getClassName(), e);
+      } catch (RuntimeException e) {
+        throw new RuntimeException(data.getClassName() + " is not a valid Aggregator class.", e);
+      } catch (InvocationTargetException e) {
+        throw new RuntimeException("Could not invoke class " + data.getClassName(), e);
+      } catch (NoSuchMethodException e) {
+        throw new RuntimeException("Could not find constructor for class " + data.getClassName(), e);
+      }
+    }
+  }
+
+  /**
+   * Load all the fetchers configured in FetcherConf.xml
+   */
+  private void loadFetchers() {
+    Document document = Utils.loadXMLDoc(FETCHERS_CONF);
+
+    _fetchersConfData = new FetcherConfiguration(document.getDocumentElement()).getFetchersConfigurationData();
+    for (FetcherConfigurationData data : _fetchersConfData) {
+      try {
+        Class fetcherClass = Class.forName(data.getClassName());
+        Object instance = fetcherClass.getConstructor(FetcherConfigurationData.class).newInstance(data);
+        if (!(instance instanceof ElephantFetcher)) {
+          throw new IllegalArgumentException(
+              "Class " + fetcherClass.getName() + " is not an implementation of " + ElephantFetcher.class.getName());
+        }
+
+        ApplicationType type = data.getAppType();
+        if (_typeToFetcher.get(type) == null) {
+          _typeToFetcher.put(type, (ElephantFetcher) instance);
+        }
+
+        logger.info("Load Fetcher : " + data.getClassName());
+      } catch (ClassNotFoundException e) {
+        throw new RuntimeException("Could not find class " + data.getClassName(), e);
+      } catch (InstantiationException e) {
+        throw new RuntimeException("Could not instantiate class " + data.getClassName(), e);
+      } catch (IllegalAccessException e) {
+        throw new RuntimeException("Could not access constructor for class " + data.getClassName(), e);
+      } catch (RuntimeException e) {
+        throw new RuntimeException(data.getClassName() + " is not a valid Fetcher class.", e);
+      } catch (InvocationTargetException e) {
+        throw new RuntimeException("Could not invoke class " + data.getClassName(), e);
+      } catch (NoSuchMethodException e) {
+        throw new RuntimeException("Could not find constructor for class " + data.getClassName(), e);
+      }
+    }
+  }
+
+  /**
+   * Load all the heuristics and their views configured in HeuristicConf.xml
+   */
+  private void loadHeuristics() {
+    Document document = Utils.loadXMLDoc(HEURISTICS_CONF);
+
+    _heuristicsConfData = new HeuristicConfiguration(document.getDocumentElement()).getHeuristicsConfigurationData();
+    for (HeuristicConfigurationData data : _heuristicsConfData) {
+
+      // Load all the heuristic classes
+      try {
+        Class heuristicClass = Class.forName(data.getClassName());
+
+        Object instance = heuristicClass.getConstructor(HeuristicConfigurationData.class).newInstance(data);
+        if (!(instance instanceof Heuristic)) {
+          throw new IllegalArgumentException(
+              "Class " + heuristicClass.getName() + " is not an implementation of " + Heuristic.class.getName());
+        }
+        ApplicationType type = data.getAppType();
+        List<Heuristic> heuristics = _typeToHeuristics.get(type);
+        if (heuristics == null) {
+          heuristics = new ArrayList<Heuristic>();
+          _typeToHeuristics.put(type, heuristics);
+        }
+        heuristics.add((Heuristic) instance);
+
+        logger.info("Load Heuristic : " + data.getClassName());
+      } catch (ClassNotFoundException e) {
+        throw new RuntimeException("Could not find class " + data.getClassName(), e);
+      } catch (InstantiationException e) {
+        throw new RuntimeException("Could not instantiate class " + data.getClassName(), e);
+      } catch (IllegalAccessException e) {
+        throw new RuntimeException("Could not access constructor for class " + data.getClassName(), e);
+      } catch (RuntimeException e) {
+        // More descriptive on other runtime exception such as ClassCastException
+        throw new RuntimeException(data.getClassName() + " is not a valid Heuristic class.", e);
+      } catch (InvocationTargetException e) {
+        throw new RuntimeException("Could not invoke class " + data.getClassName(), e);
+      } catch (NoSuchMethodException e) {
+        throw new RuntimeException("Could not find constructor for class " + data.getClassName(), e);
+      }
+
+      // Load all the heuristic views
+      try {
+        Class viewClass = Class.forName(data.getViewName());
+
+        Method render = viewClass.getDeclaredMethod("render");
+        Html page = (Html) render.invoke(null);
+        _heuristicToView.put(data.getHeuristicName(), page);
+
+        logger.info("Load View : " + data.getViewName());
+      } catch (ClassNotFoundException e) {
+        throw new RuntimeException("Could not find view " + data.getViewName(), e);
+      } catch (IllegalAccessException e) {
+        throw new RuntimeException("Could not access render on view " + data.getViewName(), e);
+      } catch (RuntimeException e) {
+        // More descriptive on other runtime exception such as ClassCastException
+        throw new RuntimeException(data.getViewName() + " is not a valid view class.", e);
+      } catch (InvocationTargetException e) {
+        throw new RuntimeException("Could not invoke view " + data.getViewName(), e);
+      } catch (NoSuchMethodException e) {
+        throw new RuntimeException("Could not find method render for view " + data.getViewName(), e);
+      }
+    }
+
+    // Bind the NO_DATA heuristic to its helper pages; no need to add any real configurations
+    _heuristicsConfData.add(
+        new HeuristicConfigurationData(HeuristicResult.NO_DATA.getHeuristicName(),
+            HeuristicResult.NO_DATA.getHeuristicClassName(), "views.html.help.helpNoData", null, null));
+  }
+
+  /**
+   * Decides what application types can be supported.
+   *
+   * An application type is supported if all of the below are true.
+   * 1. A Fetcher is defined in FetcherConf.xml for the application type.
+   * 2. At least one Heuristic is configured in HeuristicConf.xml for the application type.
+   * 3. At least one job type is configured in JobTypeConf.xml for the application type.
+   */
+  private void configureSupportedApplicationTypes() {
+    Set<ApplicationType> supportedTypes = Sets.intersection(_typeToFetcher.keySet(), _typeToHeuristics.keySet());
+    supportedTypes = Sets.intersection(supportedTypes, _appTypeToJobTypes.keySet());
+    supportedTypes = Sets.intersection(supportedTypes, _typeToAggregator.keySet());
+
+    _typeToAggregator.keySet().retainAll(supportedTypes);
+    _typeToFetcher.keySet().retainAll(supportedTypes);
+    _typeToHeuristics.keySet().retainAll(supportedTypes);
+    _appTypeToJobTypes.keySet().retainAll(supportedTypes);
+
+    logger.info("Configuring ElephantContext...");
+    for (ApplicationType type : supportedTypes) {
+      _nameToType.put(type.getName(), type);
+
+      List<String> classes = new ArrayList<String>();
+      List<Heuristic> heuristics = _typeToHeuristics.get(type);
+      for (Heuristic heuristic : heuristics) {
+        classes.add(heuristic.getClass().getName());
+      }
+
+      List<JobType> jobTypes = _appTypeToJobTypes.get(type);
+      logger.info("Supports " + type.getName() + " application type, using " + _typeToFetcher.get(type).toString()
+          + " fetcher class with Heuristics [" + StringUtils.join(classes, ", ") + "] and following JobTypes ["
+          + StringUtils.join(jobTypes, ", ") + "].");
+    }
+  }
+
+  /**
+   * Load all the job types configured in JobTypeConf.xml
+   */
+  private void loadJobTypes() {
+    Document document = Utils.loadXMLDoc(JOB_TYPES_CONF);
+    JobTypeConfiguration conf = new JobTypeConfiguration(document.getDocumentElement());
+    _appTypeToJobTypes = conf.getAppTypeToJobTypeList();
+  }
+
+  /**
+   * Load in the GeneralConf.xml file as a configuration object for other objects to access
+   */
+  private void loadGeneralConf() {
+    logger.info("Loading configuration file " + GENERAL_CONF);
+
+    _generalConf = new Configuration();
+    _generalConf.addResource(this.getClass().getClassLoader().getResourceAsStream(GENERAL_CONF));
+  }
+
+  /**
+   * Given an application type, return the currently bound heuristics
+   *
+   * @param type The application type
+   * @return The corresponding heuristics
+   */
+  public List<Heuristic> getHeuristicsForApplicationType(ApplicationType type) {
+    return _typeToHeuristics.get(type);
+  }
+
+  /**
+   * Return the heuristic names available grouped by application type.
+   *
+   * @return A map of application type name -> a list of heuristic names
+   */
+  public Map<String, List<String>> getAllHeuristicNames() {
+    if (_heuristicGroupedNames.isEmpty()) {
+      for (Map.Entry<ApplicationType, List<Heuristic>> entry : _typeToHeuristics.entrySet()) {
+        ApplicationType type = entry.getKey();
+        List<Heuristic> list = entry.getValue();
+
+        List<String> nameList = new ArrayList<String>();
+        for (Heuristic heuristic : list) {
+          nameList.add(heuristic.getHeuristicConfData().getHeuristicName());
+        }
+
+        Collections.sort(nameList);
+        _heuristicGroupedNames.put(type.getName(), nameList);
+      }
+    }
+
+    return _heuristicGroupedNames;
+  }
+
+  /**
+   * Get the heuristic configuration data
+   *
+   * @return The configuration data of heuristics
+   */
+  public List<HeuristicConfigurationData> getHeuristicsConfigurationData() {
+    return _heuristicsConfData;
+  }
+
+  /**
+   * Given an application type, return the ElephantFetcher currently bound to that type.
+   *
+   * @param type The application type
+   * @return The corresponding fetcher
+   */
+  public ElephantFetcher getFetcherForApplicationType(ApplicationType type) {
+    return _typeToFetcher.get(type);
+  }
+
+  public HadoopMetricsAggregator getAggregatorForApplicationType(ApplicationType type) {
+    return _typeToAggregator.get(type);
+  }
+
+  /**
+   * Get the application type given a type name.
+   *
+   * @return The corresponding application type, null if not found
+   */
+  public ApplicationType getApplicationTypeForName(String typeName) {
+    return _nameToType.get(typeName.toUpperCase());
+  }
+
+  /**
+   * Get the general configuration object.
+   *
+   * @return the general configuration object.
+   */
+  public Configuration getGeneralConf() {
+    return _generalConf;
+  }
+
+  /**
+   * Get the matched job type given a HadoopApplicationData instance.
+   *
+   * @param data The HadoopApplicationData to check
+   * @return The matched job type
+   */
+  public JobType matchJobType(HadoopApplicationData data) {
+    if (data != null) {
+      List<JobType> jobTypeList = _appTypeToJobTypes.get(data.getApplicationType());
+      Properties jobProp = data.getConf();
+      for (JobType type : jobTypeList) {
+        if (type.matchType(jobProp)) {
+          return type;
+        }
+      }
+    }
+    return null;
+  }
+
+  public Map<ApplicationType, List<JobType>> getAppTypeToJobTypes() {
+    return ImmutableMap.copyOf(_appTypeToJobTypes);
+  }
+
+  public Map<String, Html> getHeuristicToView() {
+    return ImmutableMap.copyOf(_heuristicToView);
+  }
+}
diff --git a/app/com/linkedin/drelephant/ElephantFetcher.java b/app/com/linkedin/drelephant/ElephantFetcher.java
deleted file mode 100644
index 24f5afe62..000000000
--- a/app/com/linkedin/drelephant/ElephantFetcher.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.linkedin.drelephant;
-
-import com.linkedin.drelephant.hadoop.HadoopJobData;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
-
-import java.io.IOException;
-import java.util.List;
-
-
-public interface ElephantFetcher {
-
-  public List<HadoopJobData> fetchJobList() throws IOException, AuthenticationException;
-
-  public void fetchJobData(HadoopJobData job_data) throws IOException, AuthenticationException;
-
-  public void finishJob(HadoopJobData job_data, boolean success);
-
-}
diff --git a/app/com/linkedin/drelephant/ElephantFetcherClassic.java b/app/com/linkedin/drelephant/ElephantFetcherClassic.java
deleted file mode 100644
index de5bd3783..000000000
--- a/app/com/linkedin/drelephant/ElephantFetcherClassic.java
+++ /dev/null
@@ -1,310 +0,0 @@
-package com.linkedin.drelephant;
-
-import
com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder.CounterName; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -import model.JobResult; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapred.*; -import org.apache.hadoop.security.authentication.client.AuthenticatedURL; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.log4j.Logger; -import org.jsoup.Jsoup; -import org.jsoup.nodes.Document; -import org.jsoup.nodes.Element; -import org.jsoup.select.Elements; - -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.URL; -import java.text.SimpleDateFormat; -import java.text.ParseException; -import java.util.ArrayList; -import java.util.EnumMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - - -public class ElephantFetcherClassic implements ElephantFetcher { - private static final Logger logger = Logger.getLogger(ElephantFetcher.class); - private static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss"); - - private JobClient jobClient; - private Set previousJobs = new HashSet(); - private boolean firstRun = true; - - public ElephantFetcherClassic(Configuration hadoopConf) throws IOException { - init(hadoopConf); - } - - private void init(Configuration hadoopConf) throws IOException { - logger.info("Connecting to the jobtracker"); - jobClient = new JobClient(new JobConf(hadoopConf)); - } - - public List fetchJobList() throws IOException { - JobStatus[] result = null; - - result = jobClient.getAllJobs(); - if (result == null) { - throw new IOException("Error fetching joblist from jobtracker."); - } - - Set successJobs = filterSuccessfulJobs(result); - successJobs = filterPreviousJobs(successJobs, previousJobs); - - List jobList = new ArrayList(); - for (String jobId : successJobs) { - jobList.add(new HadoopJobData().setJobId(jobId)); - } - return jobList; - } - - public void finishJob(HadoopJobData jobData, boolean success) { - if (success) { - previousJobs.add(jobData.getJobId()); - } - } - - public void fetchJobData(HadoopJobData jobData) throws IOException, AuthenticationException { - JobID job_id = JobID.forName(jobData.getJobId()); - - RunningJob job = getJob(job_id); - if (job == null) { - throw new IOException("Unable to fetch job data from Jobtracker, job id = " + job_id); - } - - JobStatus status = job.getJobStatus(); - String username = status.getUsername(); - long startTime = status.getStartTime(); - String jobUrl = job.getTrackingURL(); - String jobName = job.getJobName(); - - HadoopCounterHolder counterHolder = fetchCounter(job.getCounters()); - - TaskReport[] mapperTasks = getMapTaskReports(job_id); - TaskReport[] reducerTasks = getReduceTaskReports(job_id); - String jobTrackingUrl = job.getTrackingURL(); - int sampleSize = Constants.SHUFFLE_SORT_MAX_SAMPLE_SIZE; - - HadoopTaskData[] mappers = new HadoopTaskData[mapperTasks.length]; - Statistics.shuffleArraySample(mapperTasks, sampleSize); - for (int i = 0; i < mapperTasks.length; i++) { - mappers[i] = fetchTaskData(jobTrackingUrl, mapperTasks[i], false, (i < sampleSize)); - } - - HadoopTaskData[] reducers = new HadoopTaskData[reducerTasks.length]; - 
Statistics.shuffleArraySample(reducerTasks, sampleSize); - for (int i = 0; i < reducerTasks.length; i++) { - reducers[i] = fetchTaskData(jobTrackingUrl, reducerTasks[i], true, (i < sampleSize)); - } - - Properties jobConf = getJobConf(job); - - jobData.setUsername(username).setStartTime(startTime).setUrl(jobUrl).setJobName(jobName).setCounters(counterHolder) - .setMapperData(mappers).setReducerData(reducers).setJobConf(jobConf); - - } - - private RunningJob getJob(JobID job_id) throws IOException { - return jobClient.getJob(job_id); - } - - private TaskReport[] getMapTaskReports(JobID job_id) throws IOException { - return jobClient.getMapTaskReports(job_id); - } - - private TaskReport[] getReduceTaskReports(JobID job_id) throws IOException { - return jobClient.getReduceTaskReports(job_id); - } - - private Properties getJobConf(RunningJob job) throws IOException, AuthenticationException { - Properties properties = new Properties(); - String jobconfUrl = getJobconfUrl(job); - if (jobconfUrl == null) { - return properties; - } - - URL url = new URL(jobconfUrl); - AuthenticatedURL.Token token = new AuthenticatedURL.Token(); - HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token); - String data = IOUtils.toString(conn.getInputStream()); - Document doc = Jsoup.parse(data); - Elements rows = doc.select("table").select("tr"); - for (int i = 1; i < rows.size(); i++) { - Element row = rows.get(i); - Elements cells = row.select("> td"); - if (cells.size() == 2) { - String key = cells.get(0).text().trim(); - String value = cells.get(1).text().trim(); - properties.put(key, value); - } - } - return properties; - } - - private String getJobconfUrl(RunningJob job) { - String jobDetails = job.getTrackingURL(); - String root = jobDetails.substring(0, jobDetails.indexOf("jobdetails.jsp")); - return root + "jobconf.jsp?jobid=" + job.getID().toString(); - } - - private Set filterSuccessfulJobs(JobStatus[] jobs) { - Set successJobs = new HashSet(); - for (JobStatus job : jobs) { - if (job.getRunState() == JobStatus.SUCCEEDED && job.isJobComplete()) { - successJobs.add(job.getJobID().toString()); - } - } - return successJobs; - } - - private Set filterPreviousJobs(Set jobs, Set previousJobs) { - logger.info("Cleaning up previous runs."); - // On first run, check against DB - if (firstRun) { - Set newJobs = new HashSet(); - for (String jobId : jobs) { - JobResult prevResult = JobResult.find.byId(jobId); - if (prevResult == null) { - // Job not found, add to new jobs list - newJobs.add(jobId); - } else { - // Job found, add to old jobs list - previousJobs.add(jobId); - } - } - jobs = newJobs; - firstRun = false; - } else { - // Remove untracked jobs - previousJobs.retainAll(jobs); - // Remove previously analysed jobs - jobs.removeAll(previousJobs); - } - return jobs; - } - - private HadoopTaskData fetchTaskData(String jobDetailUrl, TaskReport task, boolean isReducer, boolean sampled) - throws IOException, AuthenticationException { - - HadoopCounterHolder taskCounter = fetchCounter(task.getCounters()); - - if (!sampled) { - return new HadoopTaskData(taskCounter); - } - - String taskDetailsUrl = getTaskDetailsPage(jobDetailUrl, task.getTaskID().toString()); - long[] time = fetchTaskDetails(taskDetailsUrl, isReducer); - - return new HadoopTaskData(taskCounter, time); - } - - private String getTaskDetailsPage(String jobDetails, String taskId) { - String root = jobDetails.substring(0, jobDetails.indexOf("jobdetails.jsp")); - return root + "taskdetails.jsp?tipid=" + taskId.toString(); - } - - 
private long[] fetchTaskDetails(String taskDetailUrl, boolean isReducer) throws IOException, AuthenticationException { - - URL url = new URL(taskDetailUrl); - AuthenticatedURL.Token token = new AuthenticatedURL.Token(); - HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token); - String data = IOUtils.toString(conn.getInputStream()); - Document doc = Jsoup.parse(data); - Elements rows = doc.select("table").select("tr"); - long[] time = null; - for (int i = 1; i < rows.size(); i++) { - Element row = rows.get(i); - try { - time = tryExtractDetailFromRow(row, isReducer); - if (time != null) { - return time; - } - } catch (Exception e) { - throw new IOException("Error in fetch task data from task detail page. TASK URL=" + taskDetailUrl, e); - } - } - throw new IOException("No valid time data found from task detail page. TASK URL=" + taskDetailUrl); - } - - //Return shuffle sort time if successfully extracted data from row - private long[] tryExtractDetailFromRow(Element row, boolean isReducer) throws ParseException { - Elements cells = row.select("> td"); - - // For rows() in reducer task page with other than 12 cols(),or 10 cols in mapper page, - // they are not rows that contains time data - if ((isReducer && cells.size() != 12) || (!isReducer && cells.size() != 10)) { - return null; - } - - boolean succeeded = cells.get(2).html().trim().equals("SUCCEEDED"); - if (succeeded) { - if (isReducer) { - // Fetch time info from reducer task page - String startTime = cells.get(4).html().trim(); - String shuffleTime = cells.get(5).html().trim(); - String sortTime = cells.get(6).html().trim(); - String finishTime = cells.get(7).html().trim(); - if (shuffleTime.contains("(")) { - shuffleTime = shuffleTime.substring(0, shuffleTime.indexOf("(") - 1); - } - if (sortTime.contains("(")) { - sortTime = sortTime.substring(0, sortTime.indexOf("(") - 1); - } - if (finishTime.contains("(")) { - finishTime = finishTime.substring(0, finishTime.indexOf("(") - 1); - } - long start = dateFormat.parse(startTime).getTime(); - long shuffle = dateFormat.parse(shuffleTime).getTime(); - long sort = dateFormat.parse(sortTime).getTime(); - long finish = dateFormat.parse(finishTime).getTime(); - - long shuffleDuration = (shuffle - start); - long sortDuration = (sort - shuffle); - return new long[] { start, finish, shuffleDuration, sortDuration }; - } else { - // Fetch time info from mapper task page - String startTime = cells.get(4).html().trim(); - String finishTime = cells.get(5).html().trim(); - if (finishTime.contains("(")) { - finishTime = finishTime.substring(0, finishTime.indexOf("(") - 1); - } - long start = dateFormat.parse(startTime).getTime(); - long finish = dateFormat.parse(finishTime).getTime(); - return new long[] { start, finish, 0, 0 }; - } - } - return null; - } - - private HadoopCounterHolder fetchCounter(Counters counters) { - Map counterMap = new EnumMap(CounterName.class); - for (CounterName counterName : CounterName.values()) { - counterMap.put(counterName, readCounter(counterName, counters)); - } - return new HadoopCounterHolder(counterMap); - } - - private long readCounter(CounterName counterName, Counters counters) { - String groupName = counterName.getGroup().getName(); - Counters.Group group = counters.getGroup(groupName); - if (group == null) { - return 0; - } - Counters.Counter counter = group.getCounterForName(counterName.getName()); - if (counter == null) { - return 0; - } - return counter.getValue(); - } -} diff --git a/app/com/linkedin/drelephant/ElephantFetcherYarn.java 
b/app/com/linkedin/drelephant/ElephantFetcherYarn.java deleted file mode 100644 index 3814d1fb7..000000000 --- a/app/com/linkedin/drelephant/ElephantFetcherYarn.java +++ /dev/null @@ -1,364 +0,0 @@ -package com.linkedin.drelephant; - -import com.google.common.collect.Lists; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder.CounterName; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; - -import model.JobResult; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.authentication.client.AuthenticatedURL; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.log4j.Logger; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.map.ObjectMapper; - -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLConnection; -import java.util.ArrayList; -import java.util.EnumMap; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - - -public class ElephantFetcherYarn implements ElephantFetcher { - private static final Logger logger = Logger.getLogger(ElephantFetcher.class); - - private RetryFactory retryFactory; - private URLFactory urlFactory; - private JSONFactory jsonFactory; - private boolean firstTime = true; - private long lastTime = 0; - private long currentTime = 0; - - public ElephantFetcherYarn(Configuration hadoopConf) throws IOException { - init(hadoopConf); - } - - private void init(Configuration hadoopConf) throws IOException { - logger.info("Connecting to the job history server..."); - String jhistoryAddr = hadoopConf.get("mapreduce.jobhistory.webapp.address"); - urlFactory = new URLFactory(jhistoryAddr); - jsonFactory = new JSONFactory(); - retryFactory = new RetryFactory(); - logger.info("Connection success."); - } - - /* - * Fetch job list to analyze - * If first time, search time span from 0 to now, check database for each job - * If not first time, search time span since last fetch, also re-fetch failed jobs - * Return list on success, throw Exception on error - */ - public List fetchJobList() throws IOException, AuthenticationException { - - List jobList; - - currentTime = System.currentTimeMillis(); - URL joblistURL = urlFactory.fetchJobListURL(lastTime, currentTime); - - jobList = jsonFactory.getJobData(joblistURL, firstTime); - if (firstTime) { - firstTime = false; - } else { - // If not first time, also fetch jobs that need to retry - jobList.addAll(retryFactory.getJobs()); - } - - lastTime = currentTime; - - return jobList; - } - - // Check database to see if a job is already analyzed - private boolean checkDBforJob(String jobId) { - JobResult result = JobResult.find.byId(jobId); - return (result != null); - } - - // Clear all data stored on the job object - private void clearJobData(HadoopJobData jobData) { - jobData.setCounters(null).setJobConf(null).setMapperData(null).setReducerData(null); - } - - // OnJobFinish Add to retry list upon failure - public void finishJob(HadoopJobData jobData, boolean success) { - if (!success) { - clearJobData(jobData); - // Add to retry list - retryFactory.addJob(jobData); - } - } - - // Fetch job detailed data. 
Return true on success - public void fetchJobData(HadoopJobData jobData) throws IOException, AuthenticationException { - String jobId = jobData.getJobId(); - - // Fetch job counter - URL jobCounterURL = urlFactory.getJobCounterURL(jobId); - HadoopCounterHolder jobCounter = jsonFactory.getJobCounter(jobCounterURL); - - // Fetch job config - URL jobConfigURL = urlFactory.getJobConfigURL(jobId); - Properties jobConf = jsonFactory.getProperties(jobConfigURL); - - // Fetch task data - URL taskListURL = urlFactory.getTaskListURL(jobId); - List mapperList = new ArrayList(); - List reducerList = new ArrayList(); - jsonFactory.getTaskDataAll(taskListURL, jobId, mapperList, reducerList); - - HadoopTaskData[] mapperData = mapperList.toArray(new HadoopTaskData[mapperList.size()]); - HadoopTaskData[] reducerData = reducerList.toArray(new HadoopTaskData[reducerList.size()]); - - jobData.setCounters(jobCounter).setMapperData(mapperData).setReducerData(reducerData).setJobConf(jobConf); - } - - private String getJobDetailURL(String jobId) { - return urlFactory.getJobDetailURLString(jobId); - } - - private URL getTaskCounterURL(String jobId, String taskId) throws MalformedURLException { - return urlFactory.getTaskCounterURL(jobId, taskId); - } - - private URL getTaskAttemptURL(String jobId, String taskId, String attemptId) throws MalformedURLException { - return urlFactory.getTaskAttemptURL(jobId, taskId, attemptId); - } - - private class URLFactory { - - private String root; - private String restRoot; - - public URLFactory(String hserverAddr) throws IOException { - root = "http://" + hserverAddr; - restRoot = "http://" + hserverAddr + "/ws/v1/history/mapreduce/jobs"; - verifyURL(restRoot); - } - - private void verifyURL(String url) throws IOException { - final URLConnection connection = new URL(url).openConnection(); - // Check service availability - connection.connect(); - return; - } - - private String getJobDetailURLString(String jobId) { - return root + "/jobhistory/job/" + jobId; - } - - private URL fetchJobListURL(long startTime, long endTime) throws MalformedURLException { - return new URL(restRoot + "?finishedTimeBegin=" + startTime + "&finishedTimeEnd=" + endTime + "&state=SUCCEEDED"); - } - - private URL getJobConfigURL(String jobId) throws MalformedURLException { - return new URL(restRoot + "/" + jobId + "/conf"); - } - - private URL getJobCounterURL(String jobId) throws MalformedURLException { - return new URL(restRoot + "/" + jobId + "/counters"); - } - - private URL getTaskListURL(String jobId) throws MalformedURLException { - return new URL(restRoot + "/" + jobId + "/tasks"); - } - - private URL getTaskCounterURL(String jobId, String taskId) throws MalformedURLException { - return new URL(restRoot + "/" + jobId + "/tasks/" + taskId + "/counters"); - } - - private URL getTaskAttemptURL(String jobId, String taskId, String attemptId) throws MalformedURLException { - return new URL(restRoot + "/" + jobId + "/tasks/" + taskId + "/attempts/" + attemptId); - } - } - - private class JSONFactory { - private ObjectMapper mapper = new ObjectMapper(); - private AuthenticatedURL.Token token = new AuthenticatedURL.Token(); - private AuthenticatedURL authenticatedURL = new AuthenticatedURL(); - private Set counterSet = new HashSet();; - - public JSONFactory() { - // Store the set of counters we want to fetch - for (CounterName counter : CounterName.values()) { - counterSet.add(counter.getName()); - } - } - - private List getJobData(URL url, boolean checkDB) throws IOException, AuthenticationException { - 
List jobList = new ArrayList(); - - HttpURLConnection conn = authenticatedURL.openConnection(url, token); - JsonNode rootNode = mapper.readTree(conn.getInputStream()); - JsonNode jobs = rootNode.path("jobs").path("job"); - - for (JsonNode job : jobs) { - String jobId = job.get("id").getValueAsText(); - - // On first time, for every job, we check database - if (checkDB && checkDBforJob(jobId)) { - continue; - } - - // New job - HadoopJobData jobData = new HadoopJobData(); - jobData.setJobId(jobId).setUsername(job.get("user").getValueAsText()) - .setJobName(job.get("name").getValueAsText()).setUrl(getJobDetailURL(jobId)); - - jobList.add(jobData); - } - return jobList; - } - - private Properties getProperties(URL url) throws IOException, AuthenticationException { - Properties jobConf = new Properties(); - - HttpURLConnection conn = authenticatedURL.openConnection(url, token); - JsonNode rootNode = mapper.readTree(conn.getInputStream()); - JsonNode configs = rootNode.path("conf").path("property"); - - for (JsonNode conf : configs) { - String key = conf.get("name").getValueAsText(); - String val = conf.get("value").getValueAsText(); - jobConf.setProperty(key, val); - } - return jobConf; - } - - private HadoopCounterHolder getJobCounter(URL url) throws IOException, AuthenticationException { - Map counterMap = new EnumMap(CounterName.class); - - HttpURLConnection conn = authenticatedURL.openConnection(url, token); - JsonNode rootNode = mapper.readTree(conn.getInputStream()); - JsonNode groups = rootNode.path("jobCounters").path("counterGroup"); - - for (JsonNode group : groups) { - for (JsonNode counter : group.path("counter")) { - String name = counter.get("name").getValueAsText(); - if (counterSet.contains(name)) { - // This is a counter we want to fetch - long val = counter.get("totalCounterValue").getLongValue(); - counterMap.put(CounterName.valueOf(name), val); - } - } - } - // For every missing counters in the job, set with default value 0 - for (CounterName name : CounterName.values()) { - if (!counterMap.containsKey(name)) { - counterMap.put(name, 0L); - } - } - return new HadoopCounterHolder(counterMap); - } - - private HadoopCounterHolder getTaskCounter(URL url) throws IOException, AuthenticationException { - Map counterMap = new EnumMap(CounterName.class); - - HttpURLConnection conn = authenticatedURL.openConnection(url, token); - JsonNode rootNode = mapper.readTree(conn.getInputStream()); - JsonNode groups = rootNode.path("jobTaskCounters").path("taskCounterGroup"); - - for (JsonNode group : groups) { - for (JsonNode counter : group.path("counter")) { - String name = counter.get("name").getValueAsText(); - if (counterSet.contains(name)) { - long val = counter.get("value").getLongValue(); - counterMap.put(CounterName.valueOf(name), val); - } - } - } - - for (CounterName name : CounterName.values()) { - if (!counterMap.containsKey(name)) { - counterMap.put(name, 0L); - } - } - return new HadoopCounterHolder(counterMap); - } - - private long[] getTaskExecTime(URL url) throws IOException, AuthenticationException { - HttpURLConnection conn = authenticatedURL.openConnection(url, token); - JsonNode rootNode = mapper.readTree(conn.getInputStream()); - JsonNode taskAttempt = rootNode.path("taskAttempt"); - - long startTime = taskAttempt.get("startTime").getLongValue(); - long finishTime = taskAttempt.get("finishTime").getLongValue(); - boolean isMapper = taskAttempt.get("type").getValueAsText().equals("MAP"); - - long[] time; - if (isMapper) { - // No shuffle sore time in Mapper - time = new 
long[] { startTime, finishTime, 0, 0 }; - } else { - long shuffleTime = taskAttempt.get("elapsedShuffleTime").getLongValue(); - long sortTime = taskAttempt.get("elapsedMergeTime").getLongValue(); - time = new long[] { startTime, finishTime, shuffleTime, sortTime }; - } - - return time; - } - - private void getTaskDataAll(URL url, String jobId, List mapperList, List reducerList) - throws IOException, AuthenticationException { - HttpURLConnection conn = authenticatedURL.openConnection(url, token); - JsonNode rootNode = mapper.readTree(conn.getInputStream()); - JsonNode tasks = rootNode.path("tasks").path("task"); - - for (JsonNode task : tasks) { - String taskId = task.get("id").getValueAsText(); - String attemptId = task.get("successfulAttempt").getValueAsText(); - boolean isMapper = task.get("type").getValueAsText().equals("MAP"); - - URL taskCounterURL = getTaskCounterURL(jobId, taskId); - HadoopCounterHolder taskCounter = getTaskCounter(taskCounterURL); - - URL taskAttemptURL = getTaskAttemptURL(jobId, taskId, attemptId); - long[] taskExecTime = getTaskExecTime(taskAttemptURL); - - HadoopTaskData taskData = new HadoopTaskData(taskCounter, taskExecTime); - if (isMapper) { - mapperList.add(taskData); - } else { - reducerList.add(taskData); - } - } - } - } - - private class RetryFactory { - private static final int DEFAULT_RETRY = 3; - private Map retryMap = new HashMap(); - - private void addJob(HadoopJobData job) { - if (retryMap.containsKey(job)) { - // This is old retry job - int retryLeft = retryMap.get(job); - if (retryLeft == 1) { - // Drop job on max retries - logger.error("Drop job. Reason: reach max retry for job id=" + job.getJobId()); - retryMap.remove(job); - } else { - retryMap.put(job, retryLeft - 1); - } - } else { - // This is new retry job - retryMap.put(job, DEFAULT_RETRY); - } - } - - private List getJobs() { - return Lists.newArrayList(retryMap.keySet()); - } - } - -} diff --git a/app/com/linkedin/drelephant/ElephantRunner.java b/app/com/linkedin/drelephant/ElephantRunner.java index 66d540758..2bebd00c7 100644 --- a/app/com/linkedin/drelephant/ElephantRunner.java +++ b/app/com/linkedin/drelephant/ElephantRunner.java @@ -1,180 +1,230 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
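The RetryFactory in the deleted fetcher above caps retries with a plain map: a job enters with a budget of DEFAULT_RETRY attempts and is dropped once the budget is exhausted. A minimal standalone sketch of that bookkeeping follows; RetryTracker and its type parameter are illustrative names, not part of the patch (the deleted class keyed the map on HadoopJobData instances).

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of the bounded-retry bookkeeping in the deleted RetryFactory.
public class RetryTracker<J> {
  private static final int DEFAULT_RETRY = 3;
  private final Map<J, Integer> retryMap = new HashMap<J, Integer>();

  public void addJob(J job) {
    Integer retryLeft = retryMap.get(job);
    if (retryLeft == null) {
      // First failure: grant the full retry budget.
      retryMap.put(job, DEFAULT_RETRY);
    } else if (retryLeft == 1) {
      // Budget exhausted: drop the job for good.
      retryMap.remove(job);
    } else {
      retryMap.put(job, retryLeft - 1);
    }
  }

  public List<J> getJobs() {
    return new ArrayList<J>(retryMap.keySet());
  }
}
```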
+ */ + package com.linkedin.drelephant; +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +import com.linkedin.drelephant.analysis.AnalyticJob; +import com.linkedin.drelephant.analysis.AnalyticJobGenerator; +import com.linkedin.drelephant.analysis.HDFSContext; +import com.linkedin.drelephant.analysis.HadoopSystemContext; +import com.linkedin.drelephant.analysis.AnalyticJobGeneratorHadoop2; + +import com.linkedin.drelephant.security.HadoopSecurity; + +import controllers.MetricsController; import java.io.IOException; import java.security.PrivilegedAction; -import java.util.ArrayList; import java.util.List; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import model.JobHeuristicResult; -import model.JobResult; -import model.JobType; +import com.linkedin.drelephant.util.Utils; +import models.AppResult; +import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.hadoop.conf.Configuration; import org.apache.log4j.Logger; -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopSecurity; -import com.linkedin.drelephant.notifications.EmailThread; - +/** + * The class that runs the Dr. Elephant daemon + */ public class ElephantRunner implements Runnable { - private static final long WAIT_INTERVAL = 10 * 1000; private static final Logger logger = Logger.getLogger(ElephantRunner.class); - private AtomicBoolean running = new AtomicBoolean(true); - private EmailThread emailer = new EmailThread(); - private HadoopSecurity hadoopSecurity; - private InfoExtractor urlRetriever = new InfoExtractor(); + + private static final long FETCH_INTERVAL = 60 * 1000; // Interval between fetches + private static final long RETRY_INTERVAL = 60 * 1000; // Interval between retries + private static final int EXECUTOR_NUM = 5; // The number of executor threads to analyse the jobs + + private static final String FETCH_INTERVAL_KEY = "drelephant.analysis.fetch.interval"; + private static final String RETRY_INTERVAL_KEY = "drelephant.analysis.retry.interval"; + private static final String EXECUTOR_NUM_KEY = "drelephant.analysis.thread.count"; + + private AtomicBoolean _running = new AtomicBoolean(true); + private long lastRun; + private long _fetchInterval; + private long _retryInterval; + private int _executorNum; + private HadoopSecurity _hadoopSecurity; + private ThreadPoolExecutor _threadPoolExecutor; + private AnalyticJobGenerator _analyticJobGenerator; + + private void loadGeneralConfiguration() { + Configuration configuration = ElephantContext.instance().getGeneralConf(); + + _executorNum = Utils.getNonNegativeInt(configuration, EXECUTOR_NUM_KEY, EXECUTOR_NUM); + _fetchInterval = Utils.getNonNegativeLong(configuration, FETCH_INTERVAL_KEY, FETCH_INTERVAL); + _retryInterval = Utils.getNonNegativeLong(configuration, RETRY_INTERVAL_KEY, RETRY_INTERVAL); + } + + private void loadAnalyticJobGenerator() { + if (HadoopSystemContext.isHadoop2Env()) { + _analyticJobGenerator = new AnalyticJobGeneratorHadoop2(); + } else { + throw new RuntimeException("Unsupported Hadoop major version detected. 
It is not 2.x."); + } + + try { + _analyticJobGenerator.configure(ElephantContext.instance().getGeneralConf()); + } catch (Exception e) { + logger.error("Error occurred when configuring the analysis provider.", e); + throw new RuntimeException(e); + } + } @Override public void run() { logger.info("Dr.elephant has started"); try { - hadoopSecurity = new HadoopSecurity(); - hadoopSecurity.doAs(new PrivilegedAction() { + _hadoopSecurity = HadoopSecurity.getInstance(); + _hadoopSecurity.doAs(new PrivilegedAction() { @Override public Void run() { - Constants.load(); - emailer.start(); - long lastRun; - ElephantFetcher fetcher = null; - - try { - // Tell which hadoop version from hadoop configuration, - // and start fetcher accordingly - Configuration hadoopConf = new Configuration(); - String framework = hadoopConf.get("mapreduce.framework.name"); - - if (framework != null) { - if (framework.equals("yarn")) { - fetcher = new ElephantFetcherYarn(hadoopConf); - } else if (framework.equals("classic")) { - fetcher = new ElephantFetcherClassic(hadoopConf); - } else { - logger.error("mapreduce.framework.name must be either 'classic' or 'yarn'. Current value: "+framework); - return null; - } - } else { - if (hadoopConf.get("mapred.job.tracker.http.address") != null) { - fetcher = new ElephantFetcherClassic(hadoopConf); - } else { - logger.error("Either mapreduce.framework.name or mapred.job.tracker.http.address must be set. Plseae check your configuration."); - return null; - } - } + HDFSContext.load(); + loadGeneralConfiguration(); + loadAnalyticJobGenerator(); + ElephantContext.init(); - } catch (IOException e) { - logger.error("Error initializing dr elephant fetcher! ", e); - return null; + // Initialize the metrics registries. + MetricsController.init(); + + logger.info("executor num is " + _executorNum); + if (_executorNum < 1) { + throw new RuntimeException("Must have at least 1 worker thread."); } + ThreadFactory factory = new ThreadFactoryBuilder().setNameFormat("dr-el-executor-thread-%d").build(); + _threadPoolExecutor = new ThreadPoolExecutor(_executorNum, _executorNum, 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue(), factory); - while (running.get()) { + while (_running.get() && !Thread.currentThread().isInterrupted()) { + _analyticJobGenerator.updateResourceManagerAddresses(); lastRun = System.currentTimeMillis(); - logger.info("Fetching job list....."); + logger.info("Fetching analytic job list..."); try { - hadoopSecurity.checkLogin(); + _hadoopSecurity.checkLogin(); } catch (IOException e) { logger.info("Error with hadoop kerberos login", e); + //Wait for a while before retry + waitInterval(_retryInterval); continue; } - List successJobs; + List todos; try { - successJobs = fetcher.fetchJobList(); + todos = _analyticJobGenerator.fetchAnalyticJobs(); } catch (Exception e) { logger.error("Error fetching job list. Try again later...", e); + //Wait for a while before retry + waitInterval(_retryInterval); continue; } - logger.info(successJobs.size() + " jobs to analyse."); - - // Analyse all ready jobs - for (HadoopJobData jobData : successJobs) { - try { - fetcher.fetchJobData(jobData); - analyzeJob(jobData); - fetcher.finishJob(jobData, true); - } catch (Exception e) { - logger.error("Error fetching job data. job id=" + jobData.getJobId(), e); - fetcher.finishJob(jobData, false); - } - } - logger.info("Finished all jobs. 
Waiting for refresh."); - - // Wait for long enough - long nextRun = lastRun + WAIT_INTERVAL; - long waitTime = nextRun - System.currentTimeMillis(); - while (running.get() && waitTime > 0) { - try { - Thread.sleep(waitTime); - } catch (InterruptedException e) { - logger.error("Thread interrupted", e); - } - waitTime = nextRun - System.currentTimeMillis(); + for (AnalyticJob analyticJob : todos) { + _threadPoolExecutor.submit(new ExecutorJob(analyticJob)); } + + int queueSize = _threadPoolExecutor.getQueue().size(); + MetricsController.setQueueSize(queueSize); + logger.info("Job queue size is " + queueSize); + + //Wait for a while before next fetch + waitInterval(_fetchInterval); } + logger.info("Main thread is terminated."); return null; } }); - } catch (IOException e) { - logger.error("Error on Hadoop Security setup. Failed to login with Kerberos"); + } catch (Exception e) { + logger.error(e.getMessage()); + logger.error(ExceptionUtils.getStackTrace(e)); } } - private void analyzeJob(HadoopJobData jobData) { - ElephantAnalyser analyser = ElephantAnalyser.instance(); + private class ExecutorJob implements Runnable { - logger.info("Analyze job " + jobData.getJobId()); + private AnalyticJob _analyticJob; - HeuristicResult[] analysisResults = analyser.analyse(jobData); - JobType jobType = analyser.getJobType(jobData); - - // Save to DB - JobResult result = new JobResult(); - result.job_id = jobData.getJobId(); - result.url = jobData.getUrl(); - result.username = jobData.getUsername(); - result.startTime = jobData.getStartTime(); - result.analysisTime = System.currentTimeMillis(); - result.jobName = jobData.getJobName(); - result.jobType = jobType; - - // Truncate long names - if (result.jobName.length() > 100) { - result.jobName = result.jobName.substring(0, 97) + "..."; + ExecutorJob(AnalyticJob analyticJob) { + _analyticJob = analyticJob; } - result.heuristicResults = new ArrayList(); - - Severity worstSeverity = Severity.NONE; - - for (HeuristicResult heuristicResult : analysisResults) { - JobHeuristicResult detail = new JobHeuristicResult(); - detail.analysisName = heuristicResult.getAnalysis(); - detail.data = heuristicResult.getDetailsCSV(); - detail.dataColumns = heuristicResult.getDetailsColumns(); - detail.severity = heuristicResult.getSeverity(); - if (detail.dataColumns < 1) { - detail.dataColumns = 1; + + @Override + public void run() { + try { + String analysisName = String.format("%s %s", _analyticJob.getAppType().getName(), _analyticJob.getAppId()); + long analysisStartTimeMillis = System.currentTimeMillis(); + logger.info(String.format("Analyzing %s", analysisName)); + AppResult result = _analyticJob.getAnalysis(); + result.save(); + long processingTime = System.currentTimeMillis() - analysisStartTimeMillis; + logger.info(String.format("Analysis of %s took %sms", analysisName, processingTime)); + MetricsController.setJobProcessingTime(processingTime); + MetricsController.markProcessedJobs(); + + } catch (InterruptedException e) { + logger.info("Thread interrupted"); + logger.info(e.getMessage()); + logger.info(ExceptionUtils.getStackTrace(e)); + + Thread.currentThread().interrupt(); + } catch (Exception e) { + logger.error(e.getMessage()); + logger.error(ExceptionUtils.getStackTrace(e)); + + if (_analyticJob != null && _analyticJob.retry()) { + logger.error("Add analytic job id [" + _analyticJob.getAppId() + "] into the retry list."); + _analyticJobGenerator.addIntoRetries(_analyticJob); + } else { + if (_analyticJob != null) { + MetricsController.markSkippedJob(); + 
logger.error("Drop the analytic job. Reason: reached the max retries for application id = [" + + _analyticJob.getAppId() + "]."); + } + } } - result.heuristicResults.add(detail); - worstSeverity = Severity.max(worstSeverity, detail.severity); } + } - result.severity = worstSeverity; - urlRetriever.retrieveURLs(result, jobData); + private void waitInterval(long interval) { + // Wait for long enough + long nextRun = lastRun + interval; + long waitTime = nextRun - System.currentTimeMillis(); - result.save(); + if (waitTime <= 0) { + return; + } - emailer.enqueue(result); + try { + Thread.sleep(waitTime); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } } public void kill() { - running.set(false); - emailer.kill(); + _running.set(false); + if (_threadPoolExecutor != null) { + _threadPoolExecutor.shutdownNow(); + } } } diff --git a/app/com/linkedin/drelephant/InfoExtractor.java b/app/com/linkedin/drelephant/InfoExtractor.java deleted file mode 100644 index 9cab589ce..000000000 --- a/app/com/linkedin/drelephant/InfoExtractor.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.linkedin.drelephant; - -import java.util.Properties; - -import org.apache.log4j.Logger; - -import model.JobResult; - -import com.linkedin.drelephant.hadoop.HadoopJobData; - -/** - * InfoExtractor is responsible for retrieving information and context about a - * job from the job's configuration which will be leveraged by the UI - */ -public class InfoExtractor { - private static final Logger logger = Logger.getLogger(InfoExtractor.class); - private static final String AZK_URL_PREFIX = "azkaban.link"; - private static final String AZK_WORKFLOW_URL = "azkaban.link.workflow.url"; - private static final String AZK_JOB_URL = "azkaban.link.job.url"; - private static final String AZK_JOB_EXECUTION_URL = - "azkaban.link.jobexec.url"; - private static final String AZK_EXECUTION_URL = "azkaban.link.execution.url"; - private static final String AZK_ATTEMPT_URL = "azkaban.link.attempt.url"; - private static final String AZK_URN_KEY = "azk.urn"; - - void retrieveURLs(JobResult result, HadoopJobData jobData) { - Properties jobConf = jobData.getJobConf(); - String jobId = jobData.getJobId(); - result.jobExecUrl = truncate(jobConf.getProperty(AZK_ATTEMPT_URL), jobId); - // For jobs launched by Azkaban, we consider different attempts to be - // different jobs - result.jobUrl = truncate(jobConf.getProperty(AZK_JOB_URL), jobId); - result.flowExecUrl = truncate(jobConf.getProperty(AZK_EXECUTION_URL), jobId); - result.flowUrl = truncate(jobConf.getProperty(AZK_WORKFLOW_URL), jobId); - } - - String truncate(String value, String jobId) { - if (value != null && value.length() > JobResult.URL_LEN_LIMIT) { - logger.info("Truncate long URL in job result for job : "+jobId+ ". Original Url : "+value); - value = value.substring(0, JobResult.URL_LEN_LIMIT); - } - return value; - } -} diff --git a/app/com/linkedin/drelephant/analysis/AnalyticJob.java b/app/com/linkedin/drelephant/analysis/AnalyticJob.java new file mode 100644 index 000000000..029f429f8 --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/AnalyticJob.java @@ -0,0 +1,326 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import com.linkedin.drelephant.ElephantContext; +import com.linkedin.drelephant.util.InfoExtractor; +import com.linkedin.drelephant.util.Utils; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import models.AppHeuristicResult; +import models.AppHeuristicResultDetails; +import models.AppResult; +import org.apache.log4j.Logger; + + +/** + * This class wraps some basic meta data of a completed application run (notice that the information is generally the + * same regardless of hadoop versions and application types), and then promises to return the analyzed result later. + */ +public class AnalyticJob { + private static final Logger logger = Logger.getLogger(AnalyticJob.class); + + private static final String UNKNOWN_JOB_TYPE = "Unknown"; // The default job type when the data matches nothing. + private static final int _RETRY_LIMIT = 3; // Number of times a job needs to be tried before dropping + private static final String EXCLUDE_JOBTYPE = "exclude_jobtypes_filter"; // excluded Job Types for heuristic + + private int _retries = 0; + private ApplicationType _type; + private String _appId; + private String _name; + private String _queueName; + private String _user; + private String _trackingUrl; + private long _startTime; + private long _finishTime; + + /** + * Returns the application type + * E.g., Mapreduce or Spark + * + * @return The application type + */ + public ApplicationType getAppType() { + return _type; + } + + /** + * Set the application type of this job. 
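Every AnalyticJob setter below returns this, so callers can assemble a job in one chained expression, which is exactly how AnalyticJobGeneratorHadoop2.readApps uses it further down. A usage sketch follows; all literal values are made up.

```java
import com.linkedin.drelephant.analysis.AnalyticJob;
import com.linkedin.drelephant.analysis.ApplicationType;

public class AnalyticJobUsage {
  public static void main(String[] args) {
    // Illustrative values; in the daemon these come from the RM's /ws/v1/cluster/apps JSON.
    AnalyticJob job = new AnalyticJob()
        .setAppId("application_1461566847127_84624")
        .setAppType(new ApplicationType("MAPREDUCE"))
        .setUser("someuser")
        .setName("word count")
        .setQueueName("default")
        .setTrackingUrl("http://rm-host:8088/proxy/application_1461566847127_84624/")
        .setStartTime(1461600000000L)
        .setFinishTime(1461600060000L);
    System.out.println(job.getAppId() + " was submitted to queue " + job.getQueueName());
  }
}
```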
+ * + * @param type The Application type + * @return The analytic job + */ + public AnalyticJob setAppType(ApplicationType type) { + _type = type; + return this; + } + + /** + * Set the application id of this job + * + * @param appId The application id of the job, obtained from the resource manager + * @return The analytic job + */ + public AnalyticJob setAppId(String appId) { + _appId = appId; + return this; + } + + /** + * Set the name of the analytic job + * + * @param name The name of the job + * @return The analytic job + */ + public AnalyticJob setName(String name) { + _name = name; + return this; + } + + /** + * Set the queue name in which the analytic job was submitted + * + * @param name the name of the queue + * @return The analytic job + */ + public AnalyticJob setQueueName(String name) { + _queueName = name; + return this; + } + + /** + * Sets the user who ran the job + * + * @param user The username of the user + * @return The analytic job + */ + public AnalyticJob setUser(String user) { + _user = user; + return this; + } + + /** + * Sets the start time of the job + * Start time is the time at which the job was submitted by the resource manager + * + * @param startTime The start time in epoch milliseconds + * @return The analytic job + */ + public AnalyticJob setStartTime(long startTime) { + // TIMESTAMP range starts from FROM_UNIXTIME(1) = 1970-01-01 00:00:01 + if (startTime <= 0) { + startTime = 1000; // 1 sec + } + _startTime = startTime; + return this; + } + + /** + * Sets the finish time of the job + * + * @param finishTime The finish time in epoch milliseconds + * @return The analytic job + */ + public AnalyticJob setFinishTime(long finishTime) { + // TIMESTAMP range starts from FROM_UNIXTIME(1) = 1970-01-01 00:00:01 + if (finishTime <= 0) { + finishTime = 1000; // 1 sec + } + _finishTime = finishTime; + return this; + } + + /** + * Returns the application id + * + * @return The application id + */ + public String getAppId() { + return _appId; + } + + /** + * Returns the name of the analytic job + * + * @return the analytic job's name + */ + public String getName() { + return _name; + } + + /** + * Returns the user who ran the job + * + * @return The user who ran the analytic job + */ + public String getUser() { + return _user; + } + + /** + * Returns the time at which the job was submitted by the resource manager + * + * @return The start time + */ + public long getStartTime() { + return _startTime; + } + + /** + * Returns the finish time of the job. + * + * @return The finish time + */ + public long getFinishTime() { + return _finishTime; + } + + /** + * Returns the tracking url of the job + * + * @return The tracking url in resource manager + */ + public String getTrackingUrl() { + return _trackingUrl; + } + + /** + * Returns the queue in which the application was submitted + * + * @return The queue name + */ + public String getQueueName() { + return _queueName; + } + + /** + * Sets the tracking url for the job + * + * @param trackingUrl The url to track the job + * @return The analytic job + */ + public AnalyticJob setTrackingUrl(String trackingUrl) { + _trackingUrl = trackingUrl; + return this; + }
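setStartTime and setFinishTime above clamp non-positive values to 1000 ms because the MySQL TIMESTAMP range begins at FROM_UNIXTIME(1), i.e. one second past the epoch, and a zero would be rejected when the AppResult row is saved. The clamp in isolation, as a sketch:

```java
public class TimestampClamp {
  // MySQL TIMESTAMP starts at FROM_UNIXTIME(1) = 1970-01-01 00:00:01,
  // so non-positive millisecond values are clamped to 1000 ms (1 s).
  static long clampToTimestampRange(long epochMillis) {
    return (epochMillis <= 0) ? 1000L : epochMillis;
  }

  public static void main(String[] args) {
    System.out.println(clampToTimestampRange(0L));             // 1000
    System.out.println(clampToTimestampRange(-42L));           // 1000
    System.out.println(clampToTimestampRange(1461600000000L)); // unchanged
  }
}
```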
+ + /** + * Returns the analysed AppResult that could be directly serialized into DB. + * + * This method fetches the data using the appropriate application fetcher, runs all the heuristics on it and + * loads everything into the AppResult model. + * + * @return the analysed AppResult + * @throws Exception if the analysis process encountered a problem. + */ + public AppResult getAnalysis() throws Exception { + ElephantFetcher fetcher = ElephantContext.instance().getFetcherForApplicationType(getAppType()); + HadoopApplicationData data = fetcher.fetchData(this); + + JobType jobType = ElephantContext.instance().matchJobType(data); + String jobTypeName = jobType == null ? UNKNOWN_JOB_TYPE : jobType.getName(); + + // Run all heuristics over the fetched data + List<HeuristicResult> analysisResults = new ArrayList<HeuristicResult>(); + if (data == null || data.isEmpty()) { + // Example: a MR job has 0 mappers and 0 reducers + logger.info("No Data Received for analytic job: " + getAppId()); + analysisResults.add(HeuristicResult.NO_DATA); + } else { + List<Heuristic> heuristics = ElephantContext.instance().getHeuristicsForApplicationType(getAppType()); + for (Heuristic heuristic : heuristics) { + String confExcludedApps = heuristic.getHeuristicConfData().getParamMap().get(EXCLUDE_JOBTYPE); + + if (confExcludedApps == null || confExcludedApps.length() == 0 || + !Arrays.asList(confExcludedApps.split(",")).contains(jobTypeName)) { + HeuristicResult result = heuristic.apply(data); + if (result != null) { + analysisResults.add(result); + } + } + } + } + + HadoopMetricsAggregator hadoopMetricsAggregator = ElephantContext.instance().getAggregatorForApplicationType(getAppType()); + hadoopMetricsAggregator.aggregate(data); + HadoopAggregatedData hadoopAggregatedData = hadoopMetricsAggregator.getResult(); + + // Load app information + AppResult result = new AppResult(); + result.id = Utils.truncateField(getAppId(), AppResult.ID_LIMIT, getAppId()); + result.trackingUrl = Utils.truncateField(getTrackingUrl(), AppResult.TRACKING_URL_LIMIT, getAppId()); + result.queueName = Utils.truncateField(getQueueName(), AppResult.QUEUE_NAME_LIMIT, getAppId()); + result.username = Utils.truncateField(getUser(), AppResult.USERNAME_LIMIT, getAppId()); + result.startTime = getStartTime(); + result.finishTime = getFinishTime(); + result.name = Utils.truncateField(getName(), AppResult.APP_NAME_LIMIT, getAppId()); + result.jobType = Utils.truncateField(jobTypeName, AppResult.JOBTYPE_LIMIT, getAppId()); + result.resourceUsed = hadoopAggregatedData.getResourceUsed(); + result.totalDelay = hadoopAggregatedData.getTotalDelay(); + result.resourceWasted = hadoopAggregatedData.getResourceWasted(); + + // Load App Heuristic information + int jobScore = 0; + result.yarnAppHeuristicResults = new ArrayList<AppHeuristicResult>(); + Severity worstSeverity = Severity.NONE; + for (HeuristicResult heuristicResult : analysisResults) { + AppHeuristicResult detail = new AppHeuristicResult(); + detail.heuristicClass = Utils.truncateField(heuristicResult.getHeuristicClassName(), + AppHeuristicResult.HEURISTIC_CLASS_LIMIT, getAppId()); + detail.heuristicName = Utils.truncateField(heuristicResult.getHeuristicName(), + AppHeuristicResult.HEURISTIC_NAME_LIMIT, getAppId()); + detail.severity = heuristicResult.getSeverity(); + detail.score = heuristicResult.getScore(); + + // Load Heuristic Details + for (HeuristicResultDetails heuristicResultDetails : heuristicResult.getHeuristicResultDetails()) { + AppHeuristicResultDetails heuristicDetail = new AppHeuristicResultDetails(); + heuristicDetail.yarnAppHeuristicResult = detail; + heuristicDetail.name = Utils.truncateField(heuristicResultDetails.getName(), + AppHeuristicResultDetails.NAME_LIMIT, getAppId()); + heuristicDetail.value = Utils.truncateField(heuristicResultDetails.getValue(), + AppHeuristicResultDetails.VALUE_LIMIT, getAppId()); + heuristicDetail.details = Utils.truncateField(heuristicResultDetails.getDetails(), + AppHeuristicResultDetails.DETAILS_LIMIT, getAppId()); + // This was added for AnalyticTest. Commenting this out to fix a bug. Also disabling AnalyticJobTest. + //detail.yarnAppHeuristicResultDetails = new ArrayList(); + detail.yarnAppHeuristicResultDetails.add(heuristicDetail); + } + result.yarnAppHeuristicResults.add(detail); + worstSeverity = Severity.max(worstSeverity, detail.severity); + jobScore += detail.score; + } + result.severity = worstSeverity; + result.score = jobScore; + + // Retrieve information from job configuration like scheduler information and store them into result. + InfoExtractor.loadInfo(result, data); + + return result; + } + + /** + * Indicates whether this promise should retry itself again. + * + * @return true if it should retry, else false + */ + public boolean retry() { + return (_retries++) < _RETRY_LIMIT; + } +} diff --git a/app/com/linkedin/drelephant/analysis/AnalyticJobGenerator.java b/app/com/linkedin/drelephant/analysis/AnalyticJobGenerator.java new file mode 100644 index 000000000..4015f0408 --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/AnalyticJobGenerator.java @@ -0,0 +1,62 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import java.io.IOException; +import java.util.List; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.authentication.client.AuthenticationException; + + +/** + * Provides AnalyticJobs that will yield analysis results later. This interface basically generates the to-dos that + * are executed later. + */ +public interface AnalyticJobGenerator { + + /** + * Configures the provider instance + * + * @param configuration The Hadoop configuration object + * @throws IOException + */ + public void configure(Configuration configuration) + throws IOException; + + /** + * Configures the resource manager addresses considering HA + */ + public void updateResourceManagerAddresses(); + + /** + * Provides a list of AnalyticJobs that should be analyzed + * + * @return a list of AnalyticJobs + * @throws IOException + * @throws AuthenticationException + */ + public List<AnalyticJob> fetchAnalyticJobs() + throws IOException, AuthenticationException; + + /** + * Add an AnalyticJob into the retry list. Those jobs will be provided again via #fetchAnalyticJobs under + * the generator's decision. + * + * @param job The job to add + */ + public void addIntoRetries(AnalyticJob job); +} diff --git a/app/com/linkedin/drelephant/analysis/AnalyticJobGeneratorHadoop2.java b/app/com/linkedin/drelephant/analysis/AnalyticJobGeneratorHadoop2.java new file mode 100644 index 000000000..1d49ec84d --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/AnalyticJobGeneratorHadoop2.java @@ -0,0 +1,247 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License.
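The AnalyticJobGenerator contract above pairs addIntoRetries with fetchAnalyticJobs: parked jobs are drained back into the next fetched batch, which the Hadoop 2 implementation below does with a ConcurrentLinkedQueue. A minimal sketch of that drain pattern, with the element type simplified to String:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class RetryDrain {
  // Failed items are parked in a thread-safe queue and appended
  // to the end of the next fetched batch.
  private final Queue<String> retryQueue = new ConcurrentLinkedQueue<String>();

  public void addIntoRetries(String job) {
    retryQueue.add(job);
  }

  public List<String> fetchJobs(List<String> freshJobs) {
    List<String> batch = new ArrayList<String>(freshJobs);
    String retried;
    while ((retried = retryQueue.poll()) != null) {
      batch.add(retried);
    }
    return batch;
  }
}
```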
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import com.linkedin.drelephant.ElephantContext; +import com.linkedin.drelephant.math.Statistics; +import controllers.MetricsController; +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URL; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Queue; +import java.util.Random; +import java.util.concurrent.ConcurrentLinkedQueue; +import models.AppResult; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.authentication.client.AuthenticatedURL; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.log4j.Logger; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; + + +/** + * This class provides a list of analysis promises to be generated under Hadoop YARN environment + */ +public class AnalyticJobGeneratorHadoop2 implements AnalyticJobGenerator { + private static final Logger logger = Logger.getLogger(AnalyticJobGeneratorHadoop2.class); + private static final String RESOURCE_MANAGER_ADDRESS = "yarn.resourcemanager.webapp.address"; + private static final String IS_RM_HA_ENABLED = "yarn.resourcemanager.ha.enabled"; + private static final String RESOURCE_MANAGER_IDS = "yarn.resourcemanager.ha.rm-ids"; + private static final String RM_NODE_STATE_URL = "http://%s/ws/v1/cluster/info"; + private static final String FETCH_INITIAL_WINDOW_MS = "drelephant.analysis.fetch.initial.windowMillis"; + + private static Configuration configuration; + + // We provide one minute job fetch delay due to the job sending lag from AM/NM to JobHistoryServer HDFS + private static final long FETCH_DELAY = 60000; + + // Generate a token update interval with a random deviation so that it does not update the token exactly at the same + // time with other token updaters (e.g. ElephantFetchers). + private static final long TOKEN_UPDATE_INTERVAL = + Statistics.MINUTE_IN_MS * 30 + new Random().nextLong() % (3 * Statistics.MINUTE_IN_MS); + + private String _resourceManagerAddress; + private long _lastTime = 0; + private long _fetchStartTime = 0; + private long _currentTime = 0; + private long _tokenUpdatedTime = 0; + private AuthenticatedURL.Token _token; + private AuthenticatedURL _authenticatedURL; + private final ObjectMapper _objectMapper = new ObjectMapper(); + + private final Queue _retryQueue = new ConcurrentLinkedQueue(); + + public void updateResourceManagerAddresses() { + if (Boolean.valueOf(configuration.get(IS_RM_HA_ENABLED))) { + String resourceManagers = configuration.get(RESOURCE_MANAGER_IDS); + if (resourceManagers != null) { + logger.info("The list of RM IDs are " + resourceManagers); + List ids = Arrays.asList(resourceManagers.split(",")); + _currentTime = System.currentTimeMillis(); + updateAuthToken(); + for (String id : ids) { + try { + String resourceManager = configuration.get(RESOURCE_MANAGER_ADDRESS + "." 
+ id); + String resourceManagerURL = String.format(RM_NODE_STATE_URL, resourceManager); + logger.info("Checking RM URL: " + resourceManagerURL); + JsonNode rootNode = readJsonNode(new URL(resourceManagerURL)); + String status = rootNode.path("clusterInfo").path("haState").getValueAsText(); + if (status.equals("ACTIVE")) { + logger.info(resourceManager + " is ACTIVE"); + _resourceManagerAddress = resourceManager; + break; + } else { + logger.info(resourceManager + " is STANDBY"); + } + } catch (AuthenticationException e) { + logger.info("Error fetching resource manager " + id + " state " + e.getMessage()); + } catch (IOException e) { + logger.info("Error fetching JSON for resource manager " + id + " status " + e.getMessage()); + } + } + } + } else { + _resourceManagerAddress = configuration.get(RESOURCE_MANAGER_ADDRESS); + } + if (_resourceManagerAddress == null) { + throw new RuntimeException( + "Cannot get YARN resource manager address from Hadoop Configuration property: [" + RESOURCE_MANAGER_ADDRESS + + "]."); + } + } + + @Override + public void configure(Configuration configuration) + throws IOException { + this.configuration = configuration; + String initialFetchWindowString = configuration.get(FETCH_INITIAL_WINDOW_MS); + if (initialFetchWindowString != null) { + long initialFetchWindow = Long.parseLong(initialFetchWindowString); + _lastTime = System.currentTimeMillis() - FETCH_DELAY - initialFetchWindow; + _fetchStartTime = _lastTime; + } + updateResourceManagerAddresses(); + }
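fetchAnalyticJobs below asks the resource manager for applications whose finish time falls in the window (lastTime, currentTime], where currentTime deliberately trails the wall clock by FETCH_DELAY so the job history server can catch up. The window and URL arithmetic on their own; rm-host:8088 is a placeholder address:

```java
import java.net.URL;

public class FetchWindow {
  private static final long FETCH_DELAY = 60000; // 1 min allowance for history-server lag

  public static void main(String[] args) throws Exception {
    long lastTime = System.currentTimeMillis() - 10 * 60 * 1000; // end of the previous window
    long currentTime = System.currentTimeMillis() - FETCH_DELAY;

    URL succeededAppsURL = new URL(new URL("http://rm-host:8088"), String.format(
        "/ws/v1/cluster/apps?finalStatus=SUCCEEDED&finishedTimeBegin=%s&finishedTimeEnd=%s",
        String.valueOf(lastTime + 1), String.valueOf(currentTime)));
    System.out.println(succeededAppsURL);
    // The next round starts where this one ended, so no app is fetched twice:
    lastTime = currentTime;
  }
}
```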
+ + /** + * Fetch all the succeeded and failed applications/analytic jobs from the resource manager. + * + * @return The list of fetched analytic jobs + * @throws IOException + * @throws AuthenticationException + */ + @Override + public List<AnalyticJob> fetchAnalyticJobs() + throws IOException, AuthenticationException { + List<AnalyticJob> appList = new ArrayList<AnalyticJob>(); + + // There is a lag of job data from AM/NM to JobHistoryServer HDFS, we shouldn't use the current time, since there + // might be new jobs arriving after we fetch jobs. We provide one minute delay to address this lag. + _currentTime = System.currentTimeMillis() - FETCH_DELAY; + updateAuthToken(); + + logger.info("Fetching recent finished application runs between last time: " + (_lastTime + 1) + + ", and current time: " + _currentTime); + + // Fetch all succeeded apps + URL succeededAppsURL = new URL(new URL("http://" + _resourceManagerAddress), String.format( + "/ws/v1/cluster/apps?finalStatus=SUCCEEDED&finishedTimeBegin=%s&finishedTimeEnd=%s", + String.valueOf(_lastTime + 1), String.valueOf(_currentTime))); + logger.info("The succeeded apps URL is " + succeededAppsURL); + List<AnalyticJob> succeededApps = readApps(succeededAppsURL); + appList.addAll(succeededApps); + + // Fetch all failed apps + // state: Application Master State + // finalStatus: Status of the Application as reported by the Application Master + URL failedAppsURL = new URL(new URL("http://" + _resourceManagerAddress), String.format( + "/ws/v1/cluster/apps?finalStatus=FAILED&state=FINISHED&finishedTimeBegin=%s&finishedTimeEnd=%s", + String.valueOf(_lastTime + 1), String.valueOf(_currentTime))); + List<AnalyticJob> failedApps = readApps(failedAppsURL); + logger.info("The failed apps URL is " + failedAppsURL); + appList.addAll(failedApps); + + // Append promises from the retry queue at the end of the list + while (!_retryQueue.isEmpty()) { + appList.add(_retryQueue.poll()); + } + + _lastTime = _currentTime; + return appList; + } + + @Override + public void addIntoRetries(AnalyticJob promise) { + _retryQueue.add(promise); + int retryQueueSize = _retryQueue.size(); + MetricsController.setRetryQueueSize(retryQueueSize); + logger.info("Retry queue size is " + retryQueueSize); + } + + /** + * Authenticate and update the token + */ + private void updateAuthToken() { + if (_currentTime - _tokenUpdatedTime > TOKEN_UPDATE_INTERVAL) { + logger.info("AnalysisProvider updating its Authenticate Token..."); + _token = new AuthenticatedURL.Token(); + _authenticatedURL = new AuthenticatedURL(); + _tokenUpdatedTime = _currentTime; + } + } + + /** + * Connect to url using token and return the JsonNode + * + * @param url The url to connect to + * @return The JsonNode parsed from the response + * @throws IOException Unable to get the stream + * @throws AuthenticationException Authentication problem + */ + private JsonNode readJsonNode(URL url) + throws IOException, AuthenticationException { + HttpURLConnection conn = _authenticatedURL.openConnection(url, _token); + return _objectMapper.readTree(conn.getInputStream()); + } + + /** + * Parse the JSON returned by the resource manager + * + * @param url The REST call + * @return The list of analytic jobs parsed from the response + * @throws IOException + * @throws AuthenticationException Problem authenticating to resource manager + */ + private List<AnalyticJob> readApps(URL url) throws IOException, AuthenticationException { + List<AnalyticJob> appList = new ArrayList<AnalyticJob>(); + + JsonNode rootNode = readJsonNode(url); + JsonNode apps = rootNode.path("apps").path("app"); + + for (JsonNode app : apps) { + String appId = app.get("id").getValueAsText(); + + // When called first time after launch, hit the DB and avoid duplicated analytic jobs that have been analyzed + // before. + if (_lastTime > _fetchStartTime || (_lastTime == _fetchStartTime && AppResult.find.byId(appId) == null)) { + String user = app.get("user").getValueAsText(); + String name = app.get("name").getValueAsText(); + String queueName = app.get("queue").getValueAsText(); + String trackingUrl = app.get("trackingUrl") != null ?
app.get("trackingUrl").getValueAsText() : null; + long startTime = app.get("startedTime").getLongValue(); + long finishTime = app.get("finishedTime").getLongValue(); + + ApplicationType type = + ElephantContext.instance().getApplicationTypeForName(app.get("applicationType").getValueAsText()); + + // If the application type is supported + if (type != null) { + AnalyticJob analyticJob = new AnalyticJob(); + analyticJob.setAppId(appId).setAppType(type).setUser(user).setName(name).setQueueName(queueName) + .setTrackingUrl(trackingUrl).setStartTime(startTime).setFinishTime(finishTime); + + appList.add(analyticJob); + } + } + } + return appList; + } +} diff --git a/app/com/linkedin/drelephant/analysis/ApplicationType.java b/app/com/linkedin/drelephant/analysis/ApplicationType.java new file mode 100644 index 000000000..6280cddf9 --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/ApplicationType.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +/** + * Manages and represents supported application types. + */ +public class ApplicationType { + private final String _name; + + public ApplicationType(String name) { + _name = name.toUpperCase(); + } + + @Override + public int hashCode() { + return _name.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other instanceof ApplicationType) { + return ((ApplicationType) other).getName().equals(getName()); + } + return false; + } + + /** + * Get the name + * + * @return the name of the application type + */ + public String getName() { + return _name; + } +} diff --git a/app/com/linkedin/drelephant/analysis/Constants.java b/app/com/linkedin/drelephant/analysis/Constants.java deleted file mode 100644 index 9e481cceb..000000000 --- a/app/com/linkedin/drelephant/analysis/Constants.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.linkedin.drelephant.analysis; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.log4j.Logger; - -import java.io.IOException; - -public class Constants { - private static final Logger logger = Logger.getLogger(Constants.class); - public static long HDFS_BLOCK_SIZE = 64 * 1024 * 1024; - public static final long DISK_READ_SPEED = 100 * 1024 * 1024; - public static final int SHUFFLE_SORT_MAX_SAMPLE_SIZE = 50; - - public static void load() { - try { - HDFS_BLOCK_SIZE = FileSystem.get(new Configuration()).getDefaultBlockSize(new Path("/")); - } catch (IOException e) { - logger.error("Error getting FS Block Size!", e); - } - - logger.info("HDFS BLock size: " + HDFS_BLOCK_SIZE); - } -} diff --git a/app/com/linkedin/drelephant/analysis/ElephantFetcher.java b/app/com/linkedin/drelephant/analysis/ElephantFetcher.java new file mode 100644 index 000000000..d83ff90ad --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/ElephantFetcher.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +/** + * The interface that defines the common methods for every fetcher. + * + * Each combination of Hadoop version and application type has its own fetcher implementation. + */ +public interface ElephantFetcher<T extends HadoopApplicationData> { + + /** + * Given an analytic job, fetches its data object + * + * @param job The job being analyzed + * @return the fetched data + * @throws Exception If the data could not be fetched + */ + public T fetchData(AnalyticJob job) + throws Exception; +} \ No newline at end of file diff --git a/app/com/linkedin/drelephant/analysis/HDFSContext.java b/app/com/linkedin/drelephant/analysis/HDFSContext.java new file mode 100644 index 000000000..345751d67 --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/HDFSContext.java @@ -0,0 +1,51 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.log4j.Logger; +import java.io.IOException; + + +/** + * The HDFS Information + */ +public final class HDFSContext { + private static final Logger logger = Logger.getLogger(HDFSContext.class); + + public static long HDFS_BLOCK_SIZE = 64 * 1024 * 1024; + public static final long DISK_READ_SPEED = 100 * 1024 * 1024; + + private HDFSContext() { + // Empty on purpose + } + + /** + * Captures the HDFS Block Size + */ + public static void load() { + try { + HDFS_BLOCK_SIZE = FileSystem.get(new Configuration()).getDefaultBlockSize(new Path("/")); + } catch (IOException e) { + logger.error("Error getting FS Block Size!", e); + } + + logger.info("HDFS Block size: " + HDFS_BLOCK_SIZE); + } +} diff --git a/app/com/linkedin/drelephant/analysis/HadoopAggregatedData.java b/app/com/linkedin/drelephant/analysis/HadoopAggregatedData.java new file mode 100644 index 000000000..2c88f3d7b --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/HadoopAggregatedData.java @@ -0,0 +1,76 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License.
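To make the ElephantFetcher contract concrete, here is a minimal sketch of an implementation. Both classes are hypothetical, and the sketch assumes AnalyticJob exposes getAppId(), matching the setter chain shown earlier:

    import java.util.Properties;
    import com.linkedin.drelephant.analysis.AnalyticJob;
    import com.linkedin.drelephant.analysis.ApplicationType;
    import com.linkedin.drelephant.analysis.ElephantFetcher;
    import com.linkedin.drelephant.analysis.HadoopApplicationData;

    // Hypothetical data holder for the sketch.
    class ExampleData implements HadoopApplicationData {
      private final String _appId;
      ExampleData(String appId) { _appId = appId; }
      @Override public String getAppId() { return _appId; }
      @Override public Properties getConf() { return new Properties(); }
      @Override public ApplicationType getApplicationType() { return new ApplicationType("example"); }
      @Override public boolean isEmpty() { return false; }
    }

    // Hypothetical fetcher: resolves an AnalyticJob into its application data.
    public class ExampleFetcher implements ElephantFetcher<ExampleData> {
      @Override
      public ExampleData fetchData(AnalyticJob job) throws Exception {
        ExampleData data = new ExampleData(job.getAppId());
        // A real fetcher would populate counters and task data from the history server.
        return data;
      }
    }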
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +/** + * This class contains the aggregated data of a job + */ +public class HadoopAggregatedData { + + private long resourceUsed = 0; + private long resourceWasted = 0; + private long totalDelay = 0; + + /** + * Returns the resource usage (in MBSeconds) of the job + * @return The resource usage (in MBSeconds) of the job + */ + public long getResourceUsed() { + return resourceUsed; + } + + /** + * Setter for the resource usage (in MBSeconds) of the job + * @param resourceUsed The resource usage (in MBSeconds) of the job + */ + public void setResourceUsed(long resourceUsed) { + this.resourceUsed = resourceUsed; + } + + /** + * Returns the wasted resources (in MBSeconds) of the job + * @return The wasted resources (in MBSeconds) of the job + */ + public long getResourceWasted() { + return resourceWasted; + } + + /** + * Setter for the wasted resources (in MBSeconds) + * @param resourceWasted The wasted resources (in MBSeconds) of the job + */ + public void setResourceWasted(long resourceWasted) { + this.resourceWasted = resourceWasted; + } + + /** + * returns the total delay of the job + * @return The total delay of the job + */ + public long getTotalDelay() { + return totalDelay; + } + + /** + * Setter for the total delay of the job + * @param totalDelay The total delay of the job + */ + public void setTotalDelay(long totalDelay) { + this.totalDelay = totalDelay; + } + +} diff --git a/app/com/linkedin/drelephant/analysis/HadoopApplicationData.java b/app/com/linkedin/drelephant/analysis/HadoopApplicationData.java new file mode 100644 index 000000000..35dc5457f --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/HadoopApplicationData.java @@ -0,0 +1,54 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import java.util.Properties; + + +/** + * This interface indicates that a class is holding the information of a Hadoop application + */ +public interface HadoopApplicationData { + + /** + * Returns the unique id to identify an application run. + * + * @return the id + */ + public String getAppId(); + + /** + * Returns the configuration of an application. + * + * @return A java Properties that contains the application configuration + */ + public Properties getConf(); + + /** + * Returns the application type this data is for + * + * @return the application type + */ + public ApplicationType getApplicationType(); + + /** + * Indicate if the data holder is actually empty (nothing is set up). 
+ * + * @return true if the data holder is empty else false + */ + public boolean isEmpty(); +} diff --git a/app/com/linkedin/drelephant/analysis/HadoopMetricsAggregator.java b/app/com/linkedin/drelephant/analysis/HadoopMetricsAggregator.java new file mode 100644 index 000000000..3830fe008 --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/HadoopMetricsAggregator.java @@ -0,0 +1,22 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +public interface HadoopMetricsAggregator { + public void aggregate(HadoopApplicationData data); + public HadoopAggregatedData getResult(); +} diff --git a/app/com/linkedin/drelephant/analysis/HadoopSystemContext.java b/app/com/linkedin/drelephant/analysis/HadoopSystemContext.java new file mode 100644 index 000000000..0d1a37f1c --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/HadoopSystemContext.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import org.apache.hadoop.conf.Configuration; + + +/** + * Hadoop System Information + */ +public final class HadoopSystemContext { + + private static final String MAPREDUCE_FRAMEWORK_NAME_PROP = "mapreduce.framework.name"; + private static final String YARN = "yarn"; + + /** + * Detect if the current Hadoop environment is 2.x + * + * @return true if it is Hadoop 2 env, else false + */ + public static boolean isHadoop2Env() { + Configuration hadoopConf = new Configuration(); + String hadoopVersion = hadoopConf.get(MAPREDUCE_FRAMEWORK_NAME_PROP); + return hadoopVersion != null && hadoopVersion.equals(YARN); + } + + /** + * Check if a Hadoop version matches the current Hadoop environment + * + * @param majorVersion the major version number of hadoop + * @return true if we have a major version match else false + */ + public static boolean matchCurrentHadoopVersion(int majorVersion) { + return majorVersion == 2 && isHadoop2Env(); + } +} diff --git a/app/com/linkedin/drelephant/analysis/Heuristic.java b/app/com/linkedin/drelephant/analysis/Heuristic.java index 073e90171..31e7a0a67 100644 --- a/app/com/linkedin/drelephant/analysis/Heuristic.java +++ b/app/com/linkedin/drelephant/analysis/Heuristic.java @@ -1,9 +1,42 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
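A minimal sketch of how HadoopMetricsAggregator and HadoopAggregatedData fit together; the class is hypothetical and the memory/runtime figures are placeholders, since the real numbers come from the fetched application data:

    import com.linkedin.drelephant.analysis.HadoopAggregatedData;
    import com.linkedin.drelephant.analysis.HadoopApplicationData;
    import com.linkedin.drelephant.analysis.HadoopMetricsAggregator;

    // Hypothetical aggregator illustrating the MB-seconds bookkeeping.
    public class ExampleAggregator implements HadoopMetricsAggregator {
      private final HadoopAggregatedData _result = new HadoopAggregatedData();

      @Override
      public void aggregate(HadoopApplicationData data) {
        long containerMemoryMb = 2048; // placeholder: read from the job configuration
        long runtimeSeconds = 600;     // placeholder: derived from task start/finish times
        // Resource usage is tracked in MB-seconds: memory held multiplied by time held.
        _result.setResourceUsed(containerMemoryMb * runtimeSeconds);
      }

      @Override
      public HadoopAggregatedData getResult() {
        return _result;
      }
    }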
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package com.linkedin.drelephant.analysis; -import com.linkedin.drelephant.hadoop.HadoopJobData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + -public interface Heuristic { - public HeuristicResult apply(HadoopJobData data); +/** + * This interface defines a Heuristic rule. + * + * @param <T> An implementation that extends from HadoopApplicationData + */ +public interface Heuristic<T extends HadoopApplicationData> { + /** + * Given an application data instance, returns the analyzed heuristic result. + * + * @param data The data to analyze + * @return The heuristic result + */ + public HeuristicResult apply(T data); - public String getHeuristicName(); + /** + * Get the heuristic Configuration + * + * @return the heuristic configuration data + */ + public HeuristicConfigurationData getHeuristicConfData(); } diff --git a/app/com/linkedin/drelephant/analysis/HeuristicResult.java b/app/com/linkedin/drelephant/analysis/HeuristicResult.java index c716f665d..b4519e982 100644 --- a/app/com/linkedin/drelephant/analysis/HeuristicResult.java +++ b/app/com/linkedin/drelephant/analysis/HeuristicResult.java @@ -1,66 +1,141 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package com.linkedin.drelephant.analysis; -import com.linkedin.drelephant.util.Utils; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import org.apache.commons.lang.StringUtils; + +/** + * Holds the heuristic analysis result information + */ public class HeuristicResult { - private String analysis; - private Severity severity; - private List details; - private int detailsColumns = 0; - - public HeuristicResult(String analysis, Severity severity) { - this.analysis = analysis; - this.severity = severity; - this.details = new ArrayList(); - } - - public String getAnalysis() { - return analysis; - } - - public Severity getSeverity() { - return severity; - } - - /** - * Gets a list of lines of comma-separated strings - * - * @return - */ - public List getDetails() { - return details; - } - - /** - * Create a string that contains lines of comma-separated strings - * - * @return - */ - public String getDetailsCSV() { - return Utils.combineCsvLines(details.toArray(new String[details.size()])); - } - - /** - * Gets the number of columns in the csv formatted details store - * - * @return - */ - public int getDetailsColumns() { - return detailsColumns; - } - - /** - * Add a new line to the csv formatted details store - * - * @param parts strings to join into a single line - */ - public void addDetail(String...
parts) { - details.add(Utils.createCsvLine(parts)); - if (parts.length > detailsColumns) { - detailsColumns = parts.length; - } - } + public static final HeuristicResult NO_DATA = new HeuristicResult("NoDataReceived", "No Data Received", Severity.LOW, + 0, Collections.singletonList(new HeuristicResultDetails("No Data Received", "", null))); + + private String _heuristicClass; + private String _heuristicName; + private Severity _severity; + private int _score; + private List<HeuristicResultDetails> _heuristicResultDetails; + + /** + * Heuristic Result Constructor + * + * @param heuristicClass The Heuristic class + * @param heuristicName The name of the Heuristic + * @param severity The severity of the result + * @param score The computed score + */ + public HeuristicResult(String heuristicClass, String heuristicName, Severity severity, int score) { + this._heuristicClass = heuristicClass; + this._heuristicName = heuristicName; + this._severity = severity; + this._score = score; + this._heuristicResultDetails = new ArrayList<HeuristicResultDetails>(); + } + + /** + * Heuristic Result Constructor + * + * @param heuristicClass The Heuristic class + * @param heuristicName The name of the Heuristic + * @param severity The severity of the result + * @param score The computed score + * @param heuristicResultDetails more information on the heuristic details. + */ + public HeuristicResult(String heuristicClass, String heuristicName, Severity severity, int score, + List<HeuristicResultDetails> heuristicResultDetails) { + this._heuristicClass = heuristicClass; + this._heuristicName = heuristicName; + this._severity = severity; + this._score = score; + this._heuristicResultDetails = heuristicResultDetails; + } + + /** + * Returns the heuristic analyser class name + * + * @return the heuristic class name + */ + public String getHeuristicClassName() { + return _heuristicClass; + } + + /** + * Returns the heuristic analyser name + * + * @return the heuristic name + */ + public String getHeuristicName() { + return _heuristicName; + } + + /** + * Returns the severity of the Heuristic + * + * @return The severity + */ + public Severity getSeverity() { + return _severity; + } + + public int getScore() { + return _score; + } + + /** + * Gets a list of HeuristicResultDetails + * + * @return The list of heuristic result details + */ + public List<HeuristicResultDetails> getHeuristicResultDetails() { + return _heuristicResultDetails; + } + + /** + * Add the App Heuristic Result Detail entry + */ + public void addResultDetail(String name, String value, String details) { + _heuristicResultDetails.add(new HeuristicResultDetails(name, value, details)); + } + + /** + * Add the App Heuristic Result Detail without details + */ + public void addResultDetail(String name, String value) { + _heuristicResultDetails.add(new HeuristicResultDetails(name, value, null)); + } + + /** + * Set the severity of the heuristic + * + * @param severity The severity to be set + */ + public void setSeverity(Severity severity) { + this._severity = severity; + } + + @Override + public String toString() { + return "{analysis: " + _heuristicClass + ", severity: " + _severity + ", details: [" + + StringUtils.join(_heuristicResultDetails, " ") + "]}"; + } } diff --git a/app/com/linkedin/drelephant/analysis/HeuristicResultDetails.java b/app/com/linkedin/drelephant/analysis/HeuristicResultDetails.java new file mode 100644 index 000000000..caf2d8be6 --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/HeuristicResultDetails.java @@ -0,0 +1,53 @@ +/* + * Copyright 2016 LinkedIn Corp.
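As a usage sketch, a heuristic would typically build its result like this; the class name, heuristic name, score and detail values are illustrative:

    import com.linkedin.drelephant.analysis.HeuristicResult;
    import com.linkedin.drelephant.analysis.Severity;

    public class HeuristicResultExample {
      public static void main(String[] args) {
        // A hypothetical heuristic reporting moderate severity with a score of 40.
        HeuristicResult result = new HeuristicResult(
            "com.example.ExampleHeuristic", "Example Heuristic", Severity.MODERATE, 40);
        result.addResultDetail("Number of tasks", "200");
        result.addResultDetail("Group A", "100 tasks @ 512 MB avg", null);
        System.out.println(result); // {analysis: com.example.ExampleHeuristic, severity: MODERATE, ...}
      }
    }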
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + + +/** + * Holds the analysis details for each Heuristic + */ +public class HeuristicResultDetails { + + private String _name; + private String _value; + private String _details; + + public HeuristicResultDetails(String name, String value) { + this(name, value, null); + } + + public HeuristicResultDetails(String name, String value, String details) { + this._name = name; + this._value = value; + this._details = details; + } + + public String getDetails() { + return _details; + } + + public String getValue() { + return _value; + } + + public String getName() { + return _name; + } +} diff --git a/app/com/linkedin/drelephant/analysis/JobType.java b/app/com/linkedin/drelephant/analysis/JobType.java new file mode 100644 index 000000000..44cb8cb1d --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/JobType.java @@ -0,0 +1,74 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +import java.util.Properties; +import java.util.regex.Pattern; + + +/** + * Job type holder. For each job type, this stores the configuration key that uniquely identifies that type and the + * regex pattern used to match the property.
+ */ +public class JobType { + private final String _name; + private final String _confName; + private final Pattern _confPattern; + + /** + * Constructor for a JobType + * + * @param name The name of the job type + * @param confName The configuration to look into + * @param confPattern The regex pattern to match the configuration property + */ + public JobType(String name, String confName, String confPattern) { + _name = name; + _confName = confName; + _confPattern = Pattern.compile(confPattern); + } + + /** + * Check if a JobType matches a property + * + * @param jobProp The properties to match + * @return true if matched else false + */ + public boolean matchType(Properties jobProp) { + // Always return false if confName/confPattern is undefined, + // which means we cannot tell if the properties are matching the pattern + if (_confName == null || _confPattern == null) { + return false; + } + + return jobProp.containsKey(_confName) && _confPattern.matcher((String) jobProp.get(_confName)).matches(); + } + + /** + * Get the name of the job type + * + * @return The name + */ + public String getName() { + return _name; + } + + @Override + public String toString() { + return getName(); + } +} diff --git a/app/com/linkedin/drelephant/analysis/Metrics.java b/app/com/linkedin/drelephant/analysis/Metrics.java new file mode 100644 index 000000000..2ce3e8e0e --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/Metrics.java @@ -0,0 +1,60 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis; + +public enum Metrics { + + // Currently supported metrics + USED_RESOURCES("Used Resources", "resources", "The resources used by the job"), + WASTED_RESOURCES("Wasted Resources", "resources", "The resources wasted by the job"), + RUNTIME("Run Time", "time", "The run time of the job"), + WAIT_TIME("Wait Time", "time", "The wait time of the job"); + + private String text; + private String type; + private String description; + + Metrics(String text, String type, String description) { + this.text = text; + this.type = type; + this.description = description; + } + + /** + * Returns the value of the text for the metrics + * @return The text value + */ + public String getText() { + return text; + } + + /** + * Returns the type of the metrics. It can be one of resources or time + * @return The type of the metrics. + */ + public String getType() { + return type; + } + + /** + * Returns the description of the metrics + * @return The description of the metrics + */ + public String getDescription() { + return description; + } +} diff --git a/app/com/linkedin/drelephant/analysis/Severity.java b/app/com/linkedin/drelephant/analysis/Severity.java index ba0a1635e..8a35ca91a 100644 --- a/app/com/linkedin/drelephant/analysis/Severity.java +++ b/app/com/linkedin/drelephant/analysis/Severity.java @@ -1,105 +1,193 @@ +/* + * Copyright 2016 LinkedIn Corp. 
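A quick sketch of how matchType behaves; the type name, configuration key and pattern below are illustrative rather than taken from any shipped configuration:

    import java.util.Properties;
    import com.linkedin.drelephant.analysis.JobType;

    public class JobTypeExample {
      public static void main(String[] args) {
        // Matches jobs whose "pig.script" property ends in ".pig".
        JobType pigType = new JobType("Pig", "pig.script", ".*\\.pig");

        Properties conf = new Properties();
        conf.setProperty("pig.script", "/apps/etl/daily.pig");

        System.out.println(pigType.matchType(conf)); // true: key present, value matches
      }
    }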
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package com.linkedin.drelephant.analysis; import com.avaje.ebean.annotation.EnumValue; -public enum Severity { - @EnumValue("4") - CRITICAL(4, "Critical", "danger"), - - @EnumValue("3") - SEVERE(3, "Severe", "severe"), - - @EnumValue("2") - MODERATE(2, "Moderate", "warning"), - - @EnumValue("1") - LOW(1, "Low", "success"), - @EnumValue("0") - NONE(0, "None", "success"); - - private int value; - private String text; - private String bootstrapColor; - - Severity(int value, String text, String bootstrapColor) { - this.value = value; - this.text = text; - this.bootstrapColor = bootstrapColor; +/** + * The severity levels of a heuristic result + */ +public enum Severity { + @EnumValue("4") + CRITICAL(4, "Critical", "danger"), + + @EnumValue("3") + SEVERE(3, "Severe", "severe"), + + @EnumValue("2") + MODERATE(2, "Moderate", "warning"), + + @EnumValue("1") + LOW(1, "Low", "success"), + + @EnumValue("0") + NONE(0, "None", "success"); + + private int _value; + private String _text; + private String _bootstrapColor; + + /** + * @param value The severity value + * @param text The severity name + * @param bootstrapColor The severity level for color coding + */ + Severity(int value, String text, String bootstrapColor) { + this._value = value; + this._text = text; + this._bootstrapColor = bootstrapColor; + } + + /** + * Returns the severity level + * + * @return The severity value (0 to 4) + */ + public int getValue() { + return _value; + } + + /** + * Returns the Severity level Name + * + * @return Severity level (None, Low, Moderate, Severe, Critical) + */ + public String getText() { + return _text; + } + + /** + * Returns the severity level for color coding + * + * @return The severity level (color) + */ + public String getBootstrapColor() { + return _bootstrapColor; + } + + /** + * Returns the Severity corresponding to the severity value, or NONE if no value matches + * + * @param value The severity value (0 to 4) + * @return The severity + */ + public static Severity byValue(int value) { + for (Severity severity : values()) { + if (severity._value == value) { + return severity; + } } - - public int getValue() { - return value; + return NONE; + } + + /** + * Returns the maximum of the severities + * + * @param a One severity + * @param b The other severity + * @return Max(a,b) + */ + public static Severity max(Severity a, Severity b) { + if (a._value > b._value) { + return a; } - - public String getText() { - return text; + return b; + } + + /** + * Returns the maximum of the severities in the array + * + * @param severities Arbitrary number of severities + * @return Max(severities) + */ + public static Severity max(Severity...
severities) { + Severity currentSeverity = NONE; + for (Severity severity : severities) { + currentSeverity = max(currentSeverity, severity); } - - public String getBootstrapColor() { - return bootstrapColor; + return currentSeverity; + } + + /** + * Returns the minimum of the severities + * + * @param a One severity + * @param b The other severity + * @return Min(a,b) + */ + public static Severity min(Severity a, Severity b) { + if (a._value < b._value) { + return a; } - - public static Severity byValue(int value) { - for (Severity severity : values()) { - if (severity.value == value) { - return severity; - } - } - return NONE; + return b; + } + + /** + * Returns the severity level of the value in the given thresholds + * low < moderate < severe < critical + * + * Critical when value is greater than the critical threshold + * None when the value is less than the low threshold. + * + * @param value The value being tested + * @return One of the 5 severity levels + */ + public static Severity getSeverityAscending(Number value, Number low, Number moderate, Number severe, + Number critical) { + if (value.doubleValue() >= critical.doubleValue()) { + return CRITICAL; } - - public static Severity max(Severity a, Severity b) { - if (a.value > b.value) { - return a; - } - return b; + if (value.doubleValue() >= severe.doubleValue()) { + return SEVERE; } - - public static Severity max(Severity... severities) { - Severity currentSeverity = NONE; - for (Severity severity : severities) { - currentSeverity = max(currentSeverity, severity); - } - return currentSeverity; + if (value.doubleValue() >= moderate.doubleValue()) { + return MODERATE; } - - public static Severity min(Severity a, Severity b) { - if (a.value < b.value) { - return a; - } - return b; + if (value.doubleValue() >= low.doubleValue()) { + return LOW; } - - public static Severity getSeverityAscending(long value, long low, long moderate, long severe, long critical) { - if (value >= critical) { - return CRITICAL; - } - if (value >= severe) { - return SEVERE; - } - if (value >= moderate) { - return MODERATE; - } - if (value >= low) { - return LOW; - } - return NONE; + return NONE; + } + + /** + * Returns the severity level of the value in the given thresholds + * low > moderate > severe > critical + * + * Critical when value is less than the critical threshold + * None when the value is greater than the low threshold. 
+ * + * @param value The value being tested + * @return One of the 5 severity levels + */ + public static Severity getSeverityDescending(Number value, Number low, Number moderate, Number severe, + Number critical) { + if (value.doubleValue() <= critical.doubleValue()) { + return CRITICAL; } - - public static Severity getSeverityDescending(long value, long low, long moderate, long severe, long critical) { - if (value <= critical) { - return CRITICAL; - } - if (value <= severe) { - return SEVERE; - } - if (value <= moderate) { - return MODERATE; - } - if (value <= low) { - return LOW; - } - return NONE; + if (value.doubleValue() <= severe.doubleValue()) { + return SEVERE; + } + if (value.doubleValue() <= moderate.doubleValue()) { + return MODERATE; + } + if (value.doubleValue() <= low.doubleValue()) { + return LOW; } + return NONE; + } } diff --git a/app/com/linkedin/drelephant/analysis/SeverityThresholds.scala b/app/com/linkedin/drelephant/analysis/SeverityThresholds.scala new file mode 100644 index 000000000..7359c7cac --- /dev/null +++ b/app/com/linkedin/drelephant/analysis/SeverityThresholds.scala @@ -0,0 +1,53 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.analysis + +import com.linkedin.drelephant.util.Utils + + +/** + * A convenience case class for containing severity thresholds and calculating severity. + */ +case class SeverityThresholds(low: Number, moderate: Number, severe: Number, critical: Number, ascending: Boolean) { + if (ascending) { + require(low.doubleValue <= moderate.doubleValue) + require(moderate.doubleValue <= severe.doubleValue) + require(severe.doubleValue <= critical.doubleValue) + } else { + require(low.doubleValue >= moderate.doubleValue) + require(moderate.doubleValue >= severe.doubleValue) + require(severe.doubleValue >= critical.doubleValue) + } + + def severityOf(value: Number): Severity = if (ascending) { + Severity.getSeverityAscending(value, low, moderate, severe, critical) + } else { + Severity.getSeverityDescending(value, low, moderate, severe, critical) + } +} + +object SeverityThresholds { + val NUM_THRESHOLDS = 4 + + /** Returns a SeverityThresholds object from a Dr. Elephant configuration string parseable by Utils.getParam(String, int). 
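The two threshold helpers read most naturally with a worked example; the observed values and threshold numbers below are arbitrary:

    import com.linkedin.drelephant.analysis.Severity;

    public class SeverityExample {
      public static void main(String[] args) {
        // Ascending thresholds: bigger observed values are worse.
        Severity a = Severity.getSeverityAscending(9, 2, 4, 8, 16);
        System.out.println(a); // SEVERE, since 9 >= 8 but 9 < 16

        // Descending thresholds: smaller observed values are worse.
        Severity d = Severity.getSeverityDescending(3, 10, 8, 5, 2);
        System.out.println(d); // SEVERE, since 3 <= 5 but 3 > 2
      }
    }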
*/ + def parse( + rawString: String, + ascending: Boolean + ): Option[SeverityThresholds] = Option(Utils.getParam(rawString, NUM_THRESHOLDS)).map { thresholds => + SeverityThresholds(low = thresholds(0), moderate = thresholds(1), severe = thresholds(2), critical = thresholds(3), ascending) + } +} diff --git a/app/com/linkedin/drelephant/analysis/heuristics/GenericDataSkewHeuristic.java b/app/com/linkedin/drelephant/analysis/heuristics/GenericDataSkewHeuristic.java deleted file mode 100644 index ad9f63c51..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/GenericDataSkewHeuristic.java +++ /dev/null @@ -1,82 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; -import org.apache.commons.io.FileUtils; - -public abstract class GenericDataSkewHeuristic implements Heuristic { - private HadoopCounterHolder.CounterName counterName; - private String heuristicName; - - @Override - public String getHeuristicName() { - return heuristicName; - } - - protected GenericDataSkewHeuristic(HadoopCounterHolder.CounterName counterName, String heuristicName) { - this.counterName = counterName; - this.heuristicName = heuristicName; - } - - protected abstract HadoopTaskData[] getTasks(HadoopJobData data); - - @Override - public HeuristicResult apply(HadoopJobData data) { - HadoopTaskData[] tasks = getTasks(data); - - //Gather data - long[] inputBytes = new long[tasks.length]; - - for (int i = 0; i < tasks.length; i++) { - inputBytes[i] = tasks[i].getCounters().get(counterName); - } - - //Analyze data - long[][] groups = Statistics.findTwoGroups(inputBytes); - - long avg1 = Statistics.average(groups[0]); - long avg2 = Statistics.average(groups[1]); - - long min = Math.min(avg1, avg2); - long diff = Math.abs(avg2 - avg1); - - Severity severity = getDeviationSeverity(min, diff); - - //This reduces severity if the largest file sizes are insignificant - severity = Severity.min(severity, getFilesSeverity(avg2)); - - //This reduces severity if number of tasks is insignificant - severity = Severity.min(severity, Statistics.getNumTasksSeverity(groups[0].length)); - - HeuristicResult result = new HeuristicResult(heuristicName, severity); - - result.addDetail("Number of tasks", Integer.toString(tasks.length)); - result.addDetail("Group A", groups[0].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg1) + " avg"); - result.addDetail("Group B", groups[1].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg2) + " avg"); - - return result; - } - - public static Severity getDeviationSeverity(long averageMin, long averageDiff) { - if (averageMin <= 0) { - averageMin = 1; - } - long value = averageDiff / averageMin; - return Severity.getSeverityAscending(value, - 2, 4, 8, 16); - } - - public static Severity getFilesSeverity(long value) { - return Severity.getSeverityAscending(value, - Constants.HDFS_BLOCK_SIZE / 8, - Constants.HDFS_BLOCK_SIZE / 4, - Constants.HDFS_BLOCK_SIZE / 2, - Constants.HDFS_BLOCK_SIZE); - } -} diff --git a/app/com/linkedin/drelephant/analysis/heuristics/MapperDataSkewHeuristic.java 
b/app/com/linkedin/drelephant/analysis/heuristics/MapperDataSkewHeuristic.java deleted file mode 100644 index 43d806f5d..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/MapperDataSkewHeuristic.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; - -public class MapperDataSkewHeuristic extends GenericDataSkewHeuristic { - public static final String heuristicName = "Mapper Data Skew"; - - public MapperDataSkewHeuristic() { - super(HadoopCounterHolder.CounterName.HDFS_BYTES_READ, heuristicName); - } - - @Override - protected HadoopTaskData[] getTasks(HadoopJobData data) { - return data.getMapperData(); - } -} \ No newline at end of file diff --git a/app/com/linkedin/drelephant/analysis/heuristics/MapperInputSizeHeuristic.java b/app/com/linkedin/drelephant/analysis/heuristics/MapperInputSizeHeuristic.java deleted file mode 100644 index 3bc906747..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/MapperInputSizeHeuristic.java +++ /dev/null @@ -1,84 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; -import org.apache.commons.io.FileUtils; - -public class MapperInputSizeHeuristic implements Heuristic { - public static final String heuristicName = "Mapper Input Size"; - - @Override - public String getHeuristicName() { - return heuristicName; - } - - @Override - public HeuristicResult apply(HadoopJobData data) { - HadoopTaskData[] tasks = data.getMapperData(); - - //Gather data - long[] inputBytes = new long[tasks.length]; - - for (int i = 0; i < tasks.length; i++) { - inputBytes[i] = tasks[i].getCounters().get(HadoopCounterHolder.CounterName.HDFS_BYTES_READ); - } - - //Analyze data - long average = Statistics.average(inputBytes); - - Severity smallFilesSeverity = smallFilesSeverity(average, tasks.length); - Severity largeFilesSeverity = largeFilesSeverity(average, tasks.length); - Severity severity = Severity.max(smallFilesSeverity, largeFilesSeverity); - - HeuristicResult result = new HeuristicResult(heuristicName, severity); - - result.addDetail("Number of tasks", Integer.toString(tasks.length)); - result.addDetail("Average task input", FileUtils.byteCountToDisplaySize(average)); - - return result; - } - - private Severity smallFilesSeverity(long value, long numTasks) { - Severity severity = getSmallFilesSeverity(value); - Severity taskSeverity = getNumTasksSeverity(numTasks); - return Severity.min(severity, taskSeverity); - } - - private Severity largeFilesSeverity(long value, long numTasks) { - Severity severity = getLargeFilesSeverity(value); - Severity taskSeverity = getNumTasksSeverityReverse(numTasks); - return Severity.min(severity, taskSeverity); - } - - public static Severity getSmallFilesSeverity(long value) { - return Severity.getSeverityDescending(value, - Constants.HDFS_BLOCK_SIZE / 2, - Constants.HDFS_BLOCK_SIZE / 4, - Constants.HDFS_BLOCK_SIZE / 8, - Constants.HDFS_BLOCK_SIZE / 32); - } - - public static 
Severity getLargeFilesSeverity(long value) { - return Severity.getSeverityAscending(value, - Constants.HDFS_BLOCK_SIZE * 2, - Constants.HDFS_BLOCK_SIZE * 3, - Constants.HDFS_BLOCK_SIZE * 4, - Constants.HDFS_BLOCK_SIZE * 5); - } - - public static Severity getNumTasksSeverity(long numTasks) { - return Severity.getSeverityAscending(numTasks, - 10, 50, 200, 500); - } - - public static Severity getNumTasksSeverityReverse(long numTasks) { - return Severity.getSeverityDescending(numTasks, - 1000, 500, 200, 100); - } -} diff --git a/app/com/linkedin/drelephant/analysis/heuristics/MapperSpeedHeuristic.java b/app/com/linkedin/drelephant/analysis/heuristics/MapperSpeedHeuristic.java deleted file mode 100644 index 8046ef5ce..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/MapperSpeedHeuristic.java +++ /dev/null @@ -1,85 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.util.ArrayList; -import java.util.List; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -import org.apache.commons.io.FileUtils; - -public class MapperSpeedHeuristic implements Heuristic { - public static final String heuristicName = "Mapper Speed"; - - @Override - public String getHeuristicName() { - return heuristicName; - } - - @Override - public HeuristicResult apply(HadoopJobData data) { - - HadoopTaskData[] tasks = data.getMapperData(); - - List input_byte_sizes = new ArrayList(); - List speeds = new ArrayList(); - List runtimes = new ArrayList(); - - for(HadoopTaskData task : tasks) { - if(task.timed()) { - long input_bytes = task.getCounters().get(HadoopCounterHolder.CounterName.HDFS_BYTES_READ); - long runtime = task.getEndTime() - task.getStartTime(); - //Apply 1 minute buffer - runtime -= 60 * 1000; - if (runtime < 1000) { - runtime = 1000; - } - input_byte_sizes.add(input_bytes); - runtimes.add(runtime); - //Speed is bytes per second - speeds.add((1000 * input_bytes) / (runtime)); - } - } - - //Analyze data - long averageSpeed = Statistics.average(speeds); - long averageSize = Statistics.average(input_byte_sizes); - long averageRuntime = Statistics.average(runtimes); - - Severity severity = getDiskSpeedSeverity(averageSpeed); - - //This reduces severity if task runtime is insignificant - severity = Severity.min(severity, getRuntimeSeverity(averageRuntime)); - - HeuristicResult result = new HeuristicResult(heuristicName, severity); - - result.addDetail("Number of tasks", Integer.toString(tasks.length)); - result.addDetail("Average task input size", FileUtils.byteCountToDisplaySize(averageSize)); - result.addDetail("Average task speed", FileUtils.byteCountToDisplaySize(averageSpeed) + "/s"); - result.addDetail("Average task runtime", Statistics.readableTimespan(averageRuntime)); - - return result; - } - - public static Severity getDiskSpeedSeverity(long speed) { - return Severity.getSeverityDescending(speed, - Constants.DISK_READ_SPEED / 2, - Constants.DISK_READ_SPEED / 4, - Constants.DISK_READ_SPEED / 8, - Constants.DISK_READ_SPEED / 32); - } - - public static Severity getRuntimeSeverity(long runtime) { - return Severity.getSeverityAscending(runtime, - 5 * Statistics.MINUTE, - 20 * Statistics.MINUTE, - 
40 * Statistics.MINUTE, - 1 * Statistics.HOUR); - } -} diff --git a/app/com/linkedin/drelephant/analysis/heuristics/ReducerDataSkewHeuristic.java b/app/com/linkedin/drelephant/analysis/heuristics/ReducerDataSkewHeuristic.java deleted file mode 100644 index 72c6ec499..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/ReducerDataSkewHeuristic.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; - -public class ReducerDataSkewHeuristic extends GenericDataSkewHeuristic { - public static final String heuristicName = "Reducer Data Skew"; - - public ReducerDataSkewHeuristic() { - super(HadoopCounterHolder.CounterName.REDUCE_SHUFFLE_BYTES, heuristicName); - } - - @Override - protected HadoopTaskData[] getTasks(HadoopJobData data) { - return data.getReducerData(); - } -} diff --git a/app/com/linkedin/drelephant/analysis/heuristics/ReducerTimeHeuristic.java b/app/com/linkedin/drelephant/analysis/heuristics/ReducerTimeHeuristic.java deleted file mode 100644 index 4982000f9..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/ReducerTimeHeuristic.java +++ /dev/null @@ -1,85 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.util.ArrayList; -import java.util.List; - -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -public class ReducerTimeHeuristic implements Heuristic { - public static final String heuristicName = "Reducer Time"; - - @Override - public String getHeuristicName() { - return heuristicName; - } - - @Override - public HeuristicResult apply(HadoopJobData data) { - HadoopTaskData[] tasks = data.getReducerData(); - - List runTimes = new ArrayList(); - - for(HadoopTaskData task : tasks) { - if(task.timed()) { - runTimes.add(task.getRunTime()); - } - } - - //Analyze data - long averageRuntime = Statistics.average(runTimes); - - Severity shortTimeSeverity = shortTimeSeverity(averageRuntime, tasks.length); - Severity longTimeSeverity = longTimeSeverity(averageRuntime, tasks.length); - Severity severity = Severity.max(shortTimeSeverity, longTimeSeverity); - - HeuristicResult result = new HeuristicResult(heuristicName, severity); - - result.addDetail("Number of tasks", Integer.toString(tasks.length)); - result.addDetail("Average task time", Statistics.readableTimespan(averageRuntime)); - - return result; - } - - private Severity shortTimeSeverity(long runtime, long numTasks) { - Severity timeSeverity = getShortRuntimeSeverity(runtime); - Severity taskSeverity = getNumTasksSeverity(numTasks); - return Severity.min(timeSeverity, taskSeverity); - } - - private Severity longTimeSeverity(long runtime, long numTasks) { - Severity timeSeverity = getLongRuntimeSeverity(runtime); - Severity taskSeverity = getNumTasksSeverityReverse(numTasks); - return Severity.min(timeSeverity, taskSeverity); - } - - public static Severity getShortRuntimeSeverity(long runtime) { - return Severity.getSeverityDescending(runtime, - 10 * Statistics.MINUTE, - 5 * Statistics.MINUTE, - 2 * Statistics.MINUTE, - 1 * Statistics.MINUTE); - } - - public static Severity getLongRuntimeSeverity(long runtime) { - 
return Severity.getSeverityAscending(runtime, - 15 * Statistics.MINUTE, - 30 * Statistics.MINUTE, - 1 * Statistics.HOUR, - 2 * Statistics.HOUR); - } - - public static Severity getNumTasksSeverity(long numTasks) { - return Severity.getSeverityAscending(numTasks, - 10, 50, 200, 500); - } - - public static Severity getNumTasksSeverityReverse(long numTasks) { - return Severity.getSeverityDescending(numTasks, - 100, 50, 20, 10); - } -} diff --git a/app/com/linkedin/drelephant/analysis/heuristics/ShuffleSortHeuristic.java b/app/com/linkedin/drelephant/analysis/heuristics/ShuffleSortHeuristic.java deleted file mode 100644 index e00964ccd..000000000 --- a/app/com/linkedin/drelephant/analysis/heuristics/ShuffleSortHeuristic.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.util.ArrayList; -import java.util.List; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -public class ShuffleSortHeuristic implements Heuristic { - public static final String heuristicName = "Shuffle & Sort"; - - @Override - public String getHeuristicName() { - return heuristicName; - } - - @Override - public HeuristicResult apply(HadoopJobData data) { - - HadoopTaskData[] tasks = data.getReducerData(); - - List execTime = new ArrayList(); - List shuffleTime = new ArrayList(); - List sortTime = new ArrayList(); - - for(HadoopTaskData task : tasks) { - if(task.timed()) { - execTime.add(task.getExecutionTime()); - shuffleTime.add(task.getShuffleTime()); - sortTime.add(task.getSortTime()); - } - } - - //Analyze data - long avgExecTime = Statistics.average(execTime); - long avgShuffleTime = Statistics.average(shuffleTime); - long avgSortTime = Statistics.average(sortTime); - - - Severity shuffleSeverity = getShuffleSortSeverity(avgShuffleTime, avgExecTime); - Severity sortSeverity = getShuffleSortSeverity(avgSortTime, avgExecTime); - Severity severity = Severity.max(shuffleSeverity, sortSeverity); - - HeuristicResult result = new HeuristicResult(heuristicName, severity); - - result.addDetail("Number of tasks", Integer.toString(data.getReducerData().length)); - result.addDetail("Average code runtime", Statistics.readableTimespan(avgExecTime)); - String shuffleFactor = Statistics.describeFactor(avgShuffleTime, avgExecTime, "x"); - result.addDetail("Average shuffle time", Statistics.readableTimespan(avgShuffleTime) + " " + shuffleFactor); - String sortFactor = Statistics.describeFactor(avgSortTime, avgExecTime, "x"); - result.addDetail("Average sort time", Statistics.readableTimespan(avgSortTime) + " " + sortFactor); - - return result; - } - - public static Severity getShuffleSortSeverity(long runtime, long codetime) { - Severity runtimeSeverity = Severity.getSeverityAscending(runtime, - 1 * Statistics.MINUTE, - 5 * Statistics.MINUTE, - 10 * Statistics.MINUTE, - 30 * Statistics.MINUTE); - - if (codetime <= 0) { - return runtimeSeverity; - } - long value = runtime * 2 / codetime; - Severity runtimeRatioSeverity = Severity.getSeverityAscending(value, - 1, 2, 4, 8); - - return Severity.min(runtimeSeverity, runtimeRatioSeverity); - } -} \ No newline at end of file diff --git a/app/com/linkedin/drelephant/configurations/aggregator/AggregatorConfiguration.java 
b/app/com/linkedin/drelephant/configurations/aggregator/AggregatorConfiguration.java new file mode 100644 index 000000000..62120462a --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/aggregator/AggregatorConfiguration.java @@ -0,0 +1,102 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.configurations.aggregator; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.util.Utils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.log4j.Logger; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +public class AggregatorConfiguration { + + private static final Logger logger = Logger.getLogger(AggregatorConfiguration.class); + private List<AggregatorConfigurationData> _aggregatorsConfDataList; + + public AggregatorConfiguration(Element configuration) { + parseAggregatorConfiguration(configuration); + } + + /** + * Returns the list of Aggregators along with their Configuration Information + * + * @return A list of Configuration Data for the aggregators + */ + public List<AggregatorConfigurationData> getAggregatorsConfigurationData() { + return _aggregatorsConfDataList; + } + + /** + * Parses the Aggregator configuration file and loads the Aggregator Information to a list of AggregatorConfigurationData + * + * @param configuration The dom Element to be parsed + */ + private void parseAggregatorConfiguration(Element configuration) { + _aggregatorsConfDataList = new ArrayList<AggregatorConfigurationData>(); + + NodeList nodes = configuration.getChildNodes(); + int n = 0; + for (int i = 0; i < nodes.getLength(); i++) { + Node node = nodes.item(i); + if (node.getNodeType() == Node.ELEMENT_NODE) { + n++; + Element aggregatorNode = (Element) node; + + String className; + Node classNameNode = aggregatorNode.getElementsByTagName("classname").item(0); + if (classNameNode == null) { + throw new RuntimeException("No tag 'classname' in aggregator " + n); + } + className = classNameNode.getTextContent(); + if (className.equals("")) { + throw new RuntimeException("Empty tag 'classname' in aggregator " + n); + } + + Node appTypeNode = aggregatorNode.getElementsByTagName("applicationtype").item(0); + if (appTypeNode == null) { + throw new RuntimeException( + "No tag or invalid tag 'applicationtype' in aggregator " + n + " classname " + className); + } + String appTypeStr = appTypeNode.getTextContent(); + if (appTypeStr == null) { + logger.error("Application type is not specified in aggregator " + n + " classname " + className + + ".
Skipping this configuration."); + continue; + } + ApplicationType appType = new ApplicationType(appTypeStr); + // Check if parameters are defined for the aggregator + Map<String, String> paramsMap = Utils.getConfigurationParameters(aggregatorNode); + + AggregatorConfigurationData aggregatorData = new AggregatorConfigurationData(className, appType, paramsMap); + + _aggregatorsConfDataList.add(aggregatorData); + + } + } + } + +} diff --git a/app/com/linkedin/drelephant/configurations/aggregator/AggregatorConfigurationData.java b/app/com/linkedin/drelephant/configurations/aggregator/AggregatorConfigurationData.java new file mode 100644 index 000000000..f0b54b454 --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/aggregator/AggregatorConfigurationData.java @@ -0,0 +1,47 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.configurations.aggregator; + +import com.linkedin.drelephant.analysis.ApplicationType; +import java.util.HashMap; +import java.util.Map; + + +public class AggregatorConfigurationData { + + private final String _className; + private final ApplicationType _appType; + private final Map<String, String> _paramMap; + + public AggregatorConfigurationData(String className, ApplicationType appType, Map<String, String> paramMap) { + _className = className; + _appType = appType; + _paramMap = paramMap == null ? new HashMap<String, String>() : paramMap; + } + + public String getClassName() { + return _className; + } + + public ApplicationType getAppType() { + return _appType; + } + + public Map<String, String> getParamMap() { + return _paramMap; + } +} diff --git a/app/com/linkedin/drelephant/configurations/fetcher/FetcherConfiguration.java b/app/com/linkedin/drelephant/configurations/fetcher/FetcherConfiguration.java new file mode 100644 index 000000000..bf31d0a49 --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/fetcher/FetcherConfiguration.java @@ -0,0 +1,103 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License.
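For context, parseAggregatorConfiguration above expects per-aggregator elements carrying classname and applicationtype tags, plus an optional params block read by Utils.getConfigurationParameters. A sketch of such a configuration file; the root and entry element names and the class name are assumptions for illustration, not taken from a shipped file:

    <aggregators>
      <aggregator>
        <applicationtype>mapreduce</applicationtype>
        <classname>com.example.ExampleMetricsAggregator</classname>
        <!-- optional: <params><sampleparam>value</sampleparam></params> -->
      </aggregator>
    </aggregators>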
+ */ + +package com.linkedin.drelephant.configurations.fetcher; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.util.Utils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import java.util.Map; +import org.apache.log4j.Logger; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + + +/** + * This class manages the Fetcher Configurations + */ +public class FetcherConfiguration { + private static final Logger logger = Logger.getLogger(FetcherConfiguration.class); + private List<FetcherConfigurationData> _fetchersConfDataList; + + public FetcherConfiguration(Element configuration) { + parseFetcherConfiguration(configuration); + } + + /** + * Returns the list of Fetchers along with their Configuration Information + * + * @return A list of Configuration Data for the fetchers + */ + public List<FetcherConfigurationData> getFetchersConfigurationData() { + return _fetchersConfDataList; + } + + /** + * Parses the Fetcher configuration file and loads the Fetcher Information to a list of FetcherConfigurationData + * + * @param configuration The dom Element to be parsed + */ + private void parseFetcherConfiguration(Element configuration) { + _fetchersConfDataList = new ArrayList<FetcherConfigurationData>(); + + NodeList nodes = configuration.getChildNodes(); + int n = 0; + for (int i = 0; i < nodes.getLength(); i++) { + // Each fetcher node + Node node = nodes.item(i); + if (node.getNodeType() == Node.ELEMENT_NODE) { + n++; + Element fetcherElem = (Element) node; + + String className; + Node classNameNode = fetcherElem.getElementsByTagName("classname").item(0); + if (classNameNode == null) { + throw new RuntimeException("No tag 'classname' in fetcher " + n); + } + className = classNameNode.getTextContent(); + if (className.equals("")) { + throw new RuntimeException("Empty tag 'classname' in fetcher " + n); + } + + Node appTypeNode = fetcherElem.getElementsByTagName("applicationtype").item(0); + if (appTypeNode == null) { + throw new RuntimeException( + "No tag or invalid tag 'applicationtype' in fetcher " + n + " classname " + className); + } + String appTypeStr = appTypeNode.getTextContent(); + if (appTypeStr == null) { + logger.error("Application type is not specified in fetcher " + n + " classname " + className + + ". Skipping this configuration."); + continue; + } + ApplicationType appType = new ApplicationType(appTypeStr); + + // Check if parameters are defined for the fetcher + Map<String, String> paramsMap = Utils.getConfigurationParameters(fetcherElem); + + FetcherConfigurationData fetcherData = new FetcherConfigurationData(className, appType, paramsMap); + _fetchersConfDataList.add(fetcherData); + + } + } + } + +} diff --git a/app/com/linkedin/drelephant/configurations/fetcher/FetcherConfigurationData.java b/app/com/linkedin/drelephant/configurations/fetcher/FetcherConfigurationData.java new file mode 100644 index 000000000..6088d7ed2 --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/fetcher/FetcherConfigurationData.java @@ -0,0 +1,48 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.configurations.fetcher; + +import com.linkedin.drelephant.analysis.ApplicationType; +import java.util.Map; + + +/** + * The Fetcher Configuration Holder + */ +public class FetcherConfigurationData { + private final String _className; + private final ApplicationType _appType; + private final Map _paramMap; + + public FetcherConfigurationData(String className, ApplicationType appType, Map paramMap) { + _className = className; + _appType = appType; + _paramMap = paramMap; + } + + public String getClassName() { + return _className; + } + + public ApplicationType getAppType() { + return _appType; + } + + public Map getParamMap() { + return _paramMap; + } +} diff --git a/app/com/linkedin/drelephant/configurations/heuristic/HeuristicConfiguration.java b/app/com/linkedin/drelephant/configurations/heuristic/HeuristicConfiguration.java new file mode 100644 index 000000000..8528ece5c --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/heuristic/HeuristicConfiguration.java @@ -0,0 +1,114 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.configurations.heuristic; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.util.Utils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import java.util.Map; +import org.apache.log4j.Logger; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + + +/** + * This class manages the Heuristic Configurations + */ +public class HeuristicConfiguration { + private static final Logger logger = Logger.getLogger(HeuristicConfiguration.class); + private List _heuristicsConfDataList; + + public HeuristicConfiguration(Element configuration) { + parseHeuristicConfiguration(configuration); + } + + public List getHeuristicsConfigurationData() { + return _heuristicsConfDataList; + } + + private void parseHeuristicConfiguration(Element configuration) { + _heuristicsConfDataList = new ArrayList(); + + NodeList nodes = configuration.getChildNodes(); + int n = 0; + for (int i = 0; i < nodes.getLength(); i++) { + // Each heuristic node + Node node = nodes.item(i); + if (node.getNodeType() == Node.ELEMENT_NODE) { + n++; + Element heuristicElem = (Element) node; + + String className; + Node classNameNode = heuristicElem.getElementsByTagName("classname").item(0); + if (classNameNode == null) { + throw new RuntimeException("No tag 'classname' in heuristic " + n); + } + className = classNameNode.getTextContent(); + if (className.equals("")) { + throw new RuntimeException("Empty tag 'classname' in heuristic " + n); + } + + String heuristicName; + Node heuristicNameNode = heuristicElem.getElementsByTagName("heuristicname").item(0); + if (heuristicNameNode == null) { + throw new RuntimeException("No tag 'heuristicname' in heuristic " + n + " 
classname " + className); + } + heuristicName = heuristicNameNode.getTextContent(); + if (heuristicName.equals("")) { + throw new RuntimeException("Empty tag 'heuristicname' in heuristic " + n + " classname " + className); + } + + String viewName; + Node viewNameNode = heuristicElem.getElementsByTagName("viewname").item(0); + if (viewNameNode == null) { + throw new RuntimeException("No tag 'viewname' in heuristic " + n + " classname " + className); + } + viewName = viewNameNode.getTextContent(); + if (viewName.equals("")) { + throw new RuntimeException("Empty tag 'viewname' in heuristic " + n + " classname " + className); + } + + Node appTypeNode = heuristicElem.getElementsByTagName("applicationtype").item(0); + if (appTypeNode == null) { + throw new RuntimeException( + "No tag or invalid tag 'applicationtype' in heuristic " + n + " classname " + className); + } + String appTypeStr = appTypeNode.getTextContent(); + if (appTypeStr == null) { + logger.error("Application type is not specified in heuristic " + n + " classname " + className + + ". Skipping this configuration."); + continue; + } + ApplicationType appType = new ApplicationType(appTypeStr); + + // Check if parameters are defined for the heuristic + Map paramsMap = Utils.getConfigurationParameters(heuristicElem); + + HeuristicConfigurationData heuristicData = new HeuristicConfigurationData(heuristicName, className, viewName, + appType, paramsMap); + _heuristicsConfDataList.add(heuristicData); + + } + } + } + +} diff --git a/app/com/linkedin/drelephant/configurations/heuristic/HeuristicConfigurationData.java b/app/com/linkedin/drelephant/configurations/heuristic/HeuristicConfigurationData.java new file mode 100644 index 000000000..6bcc47012 --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/heuristic/HeuristicConfigurationData.java @@ -0,0 +1,61 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.configurations.heuristic; + +import com.linkedin.drelephant.analysis.ApplicationType; +import java.util.Map; + + +/** + * The Heuristic Configuration Holder + */ +public class HeuristicConfigurationData { + private final String _heuristicName; + private final String _className; + private final String _viewName; + private final ApplicationType _appType; + private final Map _paramMap; + + public HeuristicConfigurationData(String heuristicName, String className, String viewName, ApplicationType appType, + Map paramMap) { + _heuristicName = heuristicName; + _className = className; + _viewName = viewName; + _appType = appType; + _paramMap = paramMap; + } + + public String getHeuristicName() { + return _heuristicName; + } + + public String getClassName() { + return _className; + } + + public String getViewName() { + return _viewName; + } + + public ApplicationType getAppType() { + return _appType; + } + + public Map getParamMap() { + return _paramMap; + } +} diff --git a/app/com/linkedin/drelephant/configurations/jobtype/JobTypeConfiguration.java b/app/com/linkedin/drelephant/configurations/jobtype/JobTypeConfiguration.java new file mode 100644 index 000000000..b24a5a7d7 --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/jobtype/JobTypeConfiguration.java @@ -0,0 +1,171 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
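JobTypeConfiguration, added next, catches PatternSyntaxException when constructing a JobType, which implies the conf value is compiled as a regex. The JobType class itself is not part of this diff; a minimal sketch consistent with that usage:

    import java.util.regex.Pattern;

    // Illustrative stand-in; the real com.linkedin.drelephant.analysis.JobType may differ
    public class JobTypeSketch {
      private final String _name;
      private final String _confName;
      private final Pattern _confPattern;

      public JobTypeSketch(String name, String confName, String confValue) {
        _name = name;
        _confName = confName;
        _confPattern = Pattern.compile(confValue); // throws PatternSyntaxException on a bad regex
      }

      /** True when the job's configuration value for _confName matches the pattern. */
      public boolean matches(String confValue) {
        return _confPattern.matcher(confValue).matches();
      }

      public String getName() {
        return _name;
      }

      public String getConfName() {
        return _confName;
      }
    }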
+ */
+
+package com.linkedin.drelephant.configurations.jobtype;
+
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.util.Utils;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.PatternSyntaxException;
+
+import org.apache.log4j.Logger;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import com.linkedin.drelephant.analysis.JobType;
+
+
+/**
+ * This class manages the job type configurations
+ */
+public class JobTypeConfiguration {
+  private static final Logger logger = Logger.getLogger(JobTypeConfiguration.class);
+  private static final int TYPE_LEN_LIMIT = 20;
+
+  private Map<ApplicationType, List<JobType>> _appTypeToJobTypeList = new HashMap<ApplicationType, List<JobType>>();
+
+  public JobTypeConfiguration(Element configuration) {
+    parseJobTypeConfiguration(configuration);
+  }
+
+  public Map<ApplicationType, List<JobType>> getAppTypeToJobTypeList() {
+    return _appTypeToJobTypeList;
+  }
+
+  private void parseJobTypeConfiguration(Element configuration) {
+
+    Map<ApplicationType, JobType> defaultMap = new HashMap<ApplicationType, JobType>();
+
+    NodeList nodes = configuration.getChildNodes();
+    int n = 0;
+    for (int i = 0; i < nodes.getLength(); i++) {
+      Node node = nodes.item(i);
+      if (node.getNodeType() == Node.ELEMENT_NODE) {
+        n++;
+        Element jobTypeNode = (Element) node;
+
+        String jobTypeName;
+        Node jobTypeNameNode = jobTypeNode.getElementsByTagName("name").item(0);
+        if (jobTypeNameNode == null) {
+          throw new RuntimeException("No tag 'name' in jobtype " + n);
+        }
+        jobTypeName = jobTypeNameNode.getTextContent();
+        if (jobTypeName.equals("")) {
+          throw new RuntimeException("Empty tag 'name' in jobtype " + n);
+        }
+        // Truncate the job type name to fit the db column length constraint
+        if (jobTypeName.length() > TYPE_LEN_LIMIT) {
+          logger.info("Truncating job type name of length " + jobTypeName.length() + " to " + TYPE_LEN_LIMIT);
+          jobTypeName = jobTypeName.substring(0, TYPE_LEN_LIMIT);
+        }
+
+        String jobConfName;
+        Node jobConfNameNode = jobTypeNode.getElementsByTagName("conf").item(0);
+        if (jobConfNameNode == null) {
+          throw new RuntimeException("No tag 'conf' in jobtype " + jobTypeName);
+        }
+        jobConfName = jobConfNameNode.getTextContent();
+        if (jobConfName.equals("")) {
+          throw new RuntimeException("Empty tag 'conf' in jobtype " + jobTypeName);
+        }
+
+        String jobConfValue;
+        Node jobConfValueNode = jobTypeNode.getElementsByTagName("value").item(0);
+        if (jobConfValueNode == null) {
+          // Default regex: match any characters, zero or more times
+          jobConfValue = ".*";
+        } else {
+          jobConfValue = jobConfValueNode.getTextContent();
+          if (jobConfValue.equals("")) {
+            jobConfValue = ".*";
+          }
+        }
+
+        String appTypeName;
+        Node appTypeNameNode = jobTypeNode.getElementsByTagName("applicationtype").item(0);
+        if (appTypeNameNode == null) {
+          throw new RuntimeException("No tag 'applicationtype' in jobtype " + jobTypeName);
+        }
+        appTypeName = appTypeNameNode.getTextContent();
+        ApplicationType appType = new ApplicationType(appTypeName);
+
+        boolean isDefault = jobTypeNode.getElementsByTagName("isDefault").item(0) != null;
+
+        JobType newJobType = null;
+        try {
+          newJobType = new JobType(jobTypeName, jobConfName, jobConfValue);
+        } catch (PatternSyntaxException e) {
+          throw new RuntimeException(
+              "Error processing this pattern. Pattern:" + jobConfValue + " jobtype:" + jobTypeName, e);
+        }
+
+        String newJobTypeStr = String
+            .format("jobType:%s, for application type:%s, isDefault:%s, confName:%s, confValue:%s.", jobTypeName,
+                appTypeName, isDefault, jobConfName, jobConfValue);
+        logger.info("Loaded " + newJobTypeStr);
+
+        if (isDefault) {
+          if (defaultMap.containsKey(appType)) {
+            throw new RuntimeException(
+                "Each application type should have one and only one default job type. Duplicate default job type: "
+                    + newJobTypeStr + " for application type: " + appType.getName());
+          } else {
+            defaultMap.put(appType, newJobType);
+          }
+        } else {
+          List<JobType> jobTypes = getJobTypeList(appType);
+          jobTypes.add(newJobType);
+        }
+      }
+    }
+
+    // Append default job types to the end of each job type list
+    for (Map.Entry<ApplicationType, JobType> entry : defaultMap.entrySet()) {
+      ApplicationType appType = entry.getKey();
+      JobType jobType = entry.getValue();
+      List<JobType> jobTypes = getJobTypeList(appType);
+      jobTypes.add(jobType);
+    }
+
+    // Sanity check
+    for (ApplicationType appType : _appTypeToJobTypeList.keySet()) {
+      if (!defaultMap.containsKey(appType)) {
+        throw new RuntimeException("Each application type should have one and only one default job type, there is"
+            + " none for application type: " + appType.getName() + ". Use <isDefault> to tag one.");
+      }
+    }
+
+    int jobTypesSize = 0;
+    for (List<JobType> jobTypes : _appTypeToJobTypeList.values()) {
+      jobTypesSize += jobTypes.size();
+    }
+    logger.info("Loaded total " + jobTypesSize + " job types for " + _appTypeToJobTypeList.size() + " app types");
+  }
+
+  private List<JobType> getJobTypeList(ApplicationType appType) {
+    List<JobType> jobTypes = _appTypeToJobTypeList.get(appType);
+    if (jobTypes == null) {
+      jobTypes = new ArrayList<JobType>();
+      _appTypeToJobTypeList.put(appType, jobTypes);
+    }
+    return jobTypes;
+  }
+}
diff --git a/app/com/linkedin/drelephant/configurations/scheduler/SchedulerConfiguration.java b/app/com/linkedin/drelephant/configurations/scheduler/SchedulerConfiguration.java
new file mode 100644
index 000000000..c7571b2c3
--- /dev/null
+++ b/app/com/linkedin/drelephant/configurations/scheduler/SchedulerConfiguration.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */ + + +package com.linkedin.drelephant.configurations.scheduler; + +import com.linkedin.drelephant.util.Utils; + +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * This class manages the scheduler configurations + */ +public class SchedulerConfiguration { + private List _schedulerConfDataList; + + public SchedulerConfiguration(Element configuration) { + parseSchedulerConfiguration(configuration); + } + + public List getSchedulerConfigurationData() { + return _schedulerConfDataList; + } + + private void parseSchedulerConfiguration(Element configuration) { + _schedulerConfDataList = new ArrayList(); + + NodeList nodes = configuration.getChildNodes(); + int n = 0; + for (int i = 0; i < nodes.getLength(); i++) { + // Each scheduler node + Node node = nodes.item(i); + if (node.getNodeType() == Node.ELEMENT_NODE) { + n++; + Element schedulerElem = (Element) node; + + String className; + Node classNameNode = schedulerElem.getElementsByTagName("classname").item(0); + if (classNameNode == null) { + throw new RuntimeException("No tag 'classname' in scheduler " + n); + } + className = classNameNode.getTextContent(); + if (className.equals("")) { + throw new RuntimeException("Empty tag 'classname' in scheduler " + n); + } + + String schedulerName; + Node schedulerNameNode = schedulerElem.getElementsByTagName("name").item(0); + if (schedulerNameNode == null) { + throw new RuntimeException("No tag 'name' in scheduler " + n + " classname " + className); + } + schedulerName = schedulerNameNode.getTextContent(); + if (schedulerName.equals("")) { + throw new RuntimeException("Empty tag 'name' in scheduler " + n + " classname " + className); + } + + // Check if parameters are defined for the scheduler + Map paramsMap = Utils.getConfigurationParameters(schedulerElem); + + SchedulerConfigurationData schedulerData = new SchedulerConfigurationData(schedulerName, className, paramsMap); + _schedulerConfDataList.add(schedulerData); + + } + } + } + +} diff --git a/app/com/linkedin/drelephant/configurations/scheduler/SchedulerConfigurationData.java b/app/com/linkedin/drelephant/configurations/scheduler/SchedulerConfigurationData.java new file mode 100644 index 000000000..303c6cdea --- /dev/null +++ b/app/com/linkedin/drelephant/configurations/scheduler/SchedulerConfigurationData.java @@ -0,0 +1,47 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
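The scheduler's param map is what later code in this patch (ExceptionFinder) reads for credentials, so it helps to validate it up front. A sketch of such a check; the key names are taken from ExceptionFinder below, while the helper itself is hypothetical:

    import java.util.Map;

    public class SchedulerParamsCheck {
      /** Fails fast unless the params carry a username plus either a private key or a password. */
      public static void requireCredentials(Map<String, String> params, String schedulerName) {
        if (!params.containsKey("username")) {
          throw new RuntimeException("No username configured for scheduler " + schedulerName);
        }
        if (!params.containsKey("private_key") && !params.containsKey("password")) {
          throw new RuntimeException(
              "Neither private key nor password configured for scheduler " + schedulerName);
        }
      }
    }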
+ */ + +package com.linkedin.drelephant.configurations.scheduler; + +import java.util.Map; + + +/** + * Scheduler Configuration Holder + */ +public class SchedulerConfigurationData { + private final String _schedulerName; + private final String _className; + private final Map _paramMap; + + public SchedulerConfigurationData(String schedulerName, String className, Map paramMap) { + _schedulerName = schedulerName; + _className = className; + _paramMap = paramMap; + } + + public String getSchedulerName() { + return _schedulerName; + } + + public String getClassName() { + return _className; + } + + public Map getParamMap() { + return _paramMap; + } +} diff --git a/app/com/linkedin/drelephant/exceptions/EventException.java b/app/com/linkedin/drelephant/exceptions/EventException.java new file mode 100644 index 000000000..31a0129ed --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/EventException.java @@ -0,0 +1,111 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +import org.apache.log4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +/** + * This class represents an exception in the exception chain(a list of exceptions) + */ + +public class EventException { + private final Logger logger = Logger.getLogger(EventException.class); + + + Pattern stackTraceLinePattern = Pattern.compile("^[\\\\t \\t]*at (.+)\\.(.+(?=\\())\\((.*)\\)"); + /** + Example string: '\tat org.testng.Assert.fail(Assert.java:89)' + matches: ['org.testng.Assert', 'fail', "Assert.java:89'] + */ + + + Pattern exceptionDetailsPattern = Pattern.compile("^([^() :]*): (.*)"); + /** + Example string: 'java.lang.AssertionError: Failure 1 expected: but was:' + matches: ['java.lang.AssertionError','Failure 1 expected: but was:'] + */ + + + Pattern separateLinesPattern = Pattern.compile(".*\\n"); + private String _type; + private int _index; + private String _message; + private List _stackTrace; + + public EventException(int index, String rawEventException) { + this._index = index; + processRawString(rawEventException); + } + + /** + * Returns the message in EventException + * @return message in event exception + */ + public String getMessage() { + return _message; + } + + /** + * Process a raw exception string and sets the field of EventException Object + * @param rawEventException exception in a string form + */ + private void processRawString(String rawEventException) { + int frameIndex = 0; + List stackTrace = new ArrayList(); + List lines = stringToListOfLines(rawEventException); + + for (String line : lines) { + Matcher exceptionDetailsMatcher = exceptionDetailsPattern.matcher(line); + if (exceptionDetailsMatcher.find()) { + this._type = exceptionDetailsMatcher.group(1); + this._message = exceptionDetailsMatcher.group(2); + } else { + Matcher stackTraceLineMatcher = stackTraceLinePattern.matcher(line); + if (stackTraceLineMatcher.find()) { 
+ String source = stackTraceLineMatcher.group(1); + String call = stackTraceLineMatcher.group(2); + String fileDetails = stackTraceLineMatcher.group(3); + StackTraceFrame stackTraceFrame = new StackTraceFrame(frameIndex, source, call, fileDetails); + stackTrace.add(stackTraceFrame); + frameIndex += 1; + } + } + } + this._stackTrace = stackTrace; + } + + /** + * Takes a exception in string form and converts it into a list of string where each string corresponds to a line in + * exception + * @param rawEventException exception in a string form + * @return list of lines in the exception + */ + private List stringToListOfLines(String rawEventException) { + Matcher separateLinesMatcher = separateLinesPattern.matcher(rawEventException); + List lines = new ArrayList(); + while (separateLinesMatcher.find()) { + lines.add(separateLinesMatcher.group()); + } + return lines; + } +} diff --git a/app/com/linkedin/drelephant/exceptions/ExceptionFinder.java b/app/com/linkedin/drelephant/exceptions/ExceptionFinder.java new file mode 100644 index 000000000..19098eab4 --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/ExceptionFinder.java @@ -0,0 +1,227 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; +import com.linkedin.drelephant.security.HadoopSecurity; +import com.linkedin.drelephant.util.InfoExtractor; +import java.io.File; +import java.io.IOException; +import java.security.PrivilegedAction; +import javax.naming.AuthenticationException; +import org.apache.log4j.Logger; + +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + + +/** + * ExceptionFinder class finds the exception along with the level of the exception. It takes the scheduler and the url of the workflow as + * parameters. + */ +public class ExceptionFinder { + private final Logger logger = Logger.getLogger(ExceptionFinder.class); + private HadoopException _exception; + private WorkflowClient _workflowClient; + private MRClient _mrClient; + + private static String USERNAME = "username"; + private static String PRIVATE_KEY = "private_key"; + private static String PASSWORD = "password"; + private static int SAMPLE_SIZE = 3; + + /** + * Constructor for ExceptionFinder class + * @param url The url of the workflow to analyze + * @param scheduler The scheduler where the workflow was run. 
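The two EventException patterns above are easiest to trust with a tiny harness. A standalone sketch exercising them on the example strings from the comments, with the pattern literals copied verbatim:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class EventExceptionPatternDemo {
      public static void main(String[] args) {
        Pattern stackTraceLine = Pattern.compile("^[\\\\t \\t]*at (.+)\\.(.+(?=\\())\\((.*)\\)");
        Matcher m = stackTraceLine.matcher("\tat org.testng.Assert.fail(Assert.java:89)");
        if (m.find()) {
          // Prints: org.testng.Assert | fail | Assert.java:89
          System.out.println(m.group(1) + " | " + m.group(2) + " | " + m.group(3));
        }

        Pattern exceptionDetails = Pattern.compile("^([^() :]*): (.*)");
        Matcher d = exceptionDetails.matcher("java.lang.AssertionError: Failure 1");
        if (d.find()) {
          // Prints: java.lang.AssertionError -> Failure 1
          System.out.println(d.group(1) + " -> " + d.group(2));
        }
      }
    }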
+ * @throws URISyntaxException + * @throws MalformedURLException + */ + public ExceptionFinder(String url, String scheduler) + throws URISyntaxException, MalformedURLException, AuthenticationException, IOException { + + // create a new MRClient + _mrClient = new MRClient(); + + // create a new workflow client + _workflowClient = InfoExtractor.getWorkflowClientInstance(scheduler, url); + + // get the schedulerData + SchedulerConfigurationData schedulerData = InfoExtractor.getSchedulerData(scheduler); + + + if(schedulerData==null) { + throw new RuntimeException(String.format("Cannot find scheduler %s", scheduler)); + } + + if(!schedulerData.getParamMap().containsKey(USERNAME)) { + throw new RuntimeException(String.format("Cannot find username for login")); + } + + String username = schedulerData.getParamMap().get(USERNAME); + + if(schedulerData.getParamMap().containsKey(PRIVATE_KEY)) { + _workflowClient.login(username, new File(schedulerData.getParamMap().get(PRIVATE_KEY))); + } else if (schedulerData.getParamMap().containsKey(PASSWORD)) { + _workflowClient.login(username, schedulerData.getParamMap().get(PASSWORD)); + } else { + throw new RuntimeException("Neither private key nor password was specified"); + } + _exception = analyzeFlow(url); + } + + /** + * Analyzes a Flow and returns a HadoopException object which captures all the exception in the flow. + * @param execUrl the execution URL of the flow + * @return HadoopException object which captures all the exceptions in the given Flow + */ + private HadoopException analyzeFlow(final String execUrl) throws AuthenticationException, IOException { + HadoopSecurity _hadoopSecurity = HadoopSecurity.getInstance(); + + return _hadoopSecurity.doAs(new PrivilegedAction() { + @Override + public HadoopException run() { + HadoopException flowLevelException = new HadoopException(); + List childExceptions = new ArrayList(); + Map jobIdStatus = _workflowClient.getJobsFromFlow(); + + // Find exceptions in all the unsuccessful jobs of the workflow + for (String unsuccessfulJobId : jobIdStatus.keySet()) { + if (jobIdStatus.get(unsuccessfulJobId).toLowerCase().equals("failed")) { + HadoopException jobLevelException = analyzeJob(unsuccessfulJobId); + childExceptions.add(jobLevelException); + } + } + + flowLevelException.setType(HadoopException.HadoopExceptionType.FLOW); + flowLevelException.setId(execUrl); + flowLevelException.setLoggingEvent(null); // No flow level exception + flowLevelException.setChildExceptions(childExceptions); + return flowLevelException; + } + }); + } + + /** + * Given a failed Job, this method analyzes the job and returns a HadoopException object which captures all the exception in the given job. 
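The failed-job scan in analyzeFlow above looks each key up twice and is case-sensitive after lowercasing; iterating entries is a touch tighter. A sketch of an equivalent loop, assuming the same Map<String, String> shape:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class FailedJobFilter {
      /** Collects the ids of jobs whose status reads as failed, ignoring case. */
      public static List<String> failedJobIds(Map<String, String> jobIdStatus) {
        List<String> failed = new ArrayList<String>();
        for (Map.Entry<String, String> entry : jobIdStatus.entrySet()) {
          if ("failed".equalsIgnoreCase(entry.getValue())) {
            failed.add(entry.getKey());
          }
        }
        return failed;
      }
    }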
+ * @param jobId The job execution id/url, specific to the scheduler + * @return HadoopException object which captures all the exceptions in the given job + */ + private HadoopException analyzeJob(String jobId) { + HadoopException jobLevelException = new HadoopException(); + List childExceptions = new ArrayList(); + + _workflowClient.analyzeJob(jobId); + + // get the set of all the yarn jobs from workflowClient + Set yarnJobIds = _workflowClient.getYarnApplicationsFromJob(jobId); + + for (String mrJobId : yarnJobIds) { + //To do: Check if mr job logs are there or not in job history server + String rawMRJobLog = _mrClient.getMRJobLog(mrJobId); + if (rawMRJobLog != null && !rawMRJobLog.isEmpty()) { // null for log not found and empty for successful mr jobs + //To do: rawMRJob is empty for successful mr jobs but this is not a good way to figure out whether a job failed + // or succeeded, do this using the state field in rest api + HadoopException mrJobLevelException = analyzeMRJob(mrJobId, rawMRJobLog); + childExceptions.add(mrJobLevelException); + } + } + + if (_workflowClient.getJobState(jobId) == JobState.MRFAIL) { + jobLevelException.setType(HadoopException.HadoopExceptionType.MR); + jobLevelException.setLoggingEvent(_workflowClient.getJobException(jobId)); + //LoggingEvent is set only for the case if mr logs could not be found in job history server and childException is + // empty + jobLevelException.setChildExceptions(childExceptions); + } else if (_workflowClient.getJobState(jobId) == JobState.SCHEDULERFAIL) { + jobLevelException.setType(HadoopException.HadoopExceptionType.SCHEDULER); + jobLevelException.setLoggingEvent(_workflowClient.getJobException(jobId)); + jobLevelException.setChildExceptions(null); + } else if (_workflowClient.getJobState(jobId) == JobState.SCRIPTFAIL) { + jobLevelException.setType(HadoopException.HadoopExceptionType.SCRIPT); + jobLevelException.setLoggingEvent(_workflowClient.getJobException(jobId)); + jobLevelException.setChildExceptions(null); + } else if (_workflowClient.getJobState(jobId) == JobState.KILLED) { + jobLevelException.setType(HadoopException.HadoopExceptionType.KILL); + jobLevelException.setLoggingEvent(null); + jobLevelException.setChildExceptions(null); + } + jobLevelException.setId(jobId); + return jobLevelException; + } + + /** + * Given a failed MR Job id and diagnostics of the job, this method analyzes it and returns a HadoopException object which captures all the exception in the given MR Job. 
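The To-do above notes that an empty log is a weak signal of success; the job history server's "state" field is the reliable one. A sketch of that check, reusing the REST endpoint shape that MRClient (added later in this patch) already queries; authentication is omitted here, whereas MRClient goes through AuthenticatedURL:

    import java.net.URL;
    import org.codehaus.jackson.JsonNode;
    import org.codehaus.jackson.map.ObjectMapper;

    public class MRJobStateCheck {
      /** Returns true when the job history server reports a terminal failed state. */
      public static boolean isFailed(String jhistoryAddr, String mrJobId) throws Exception {
        URL url = new URL("http://" + jhistoryAddr + "/ws/v1/history/mapreduce/jobs/" + mrJobId);
        JsonNode job = new ObjectMapper().readTree(url.openStream()).get("job");
        String state = job.get("state").getTextValue();
        return "FAILED".equals(state) || "KILLED".equals(state);
      }
    }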
+ * @param mrJobId Mapreduce job id + * @param rawMRJoblog Diagnostics of the mapreduce job in a string + * @return HadoopException object which captures all the exceptions in the given Mapreduce job + */ + private HadoopException analyzeMRJob(String mrJobId, String rawMRJoblog) { + // This method is called only for unsuccessful MR jobs + HadoopException mrJobLevelException = new HadoopException(); + List childExceptions = new ArrayList(); + MRJobLogAnalyzer analyzedLog = new MRJobLogAnalyzer(rawMRJoblog); + Set failedMRTaskIds = analyzedLog.getFailedSubEvents(); + + // sampling of tasks + int samplingSize = SAMPLE_SIZE; + for (String failedMRTaskId : failedMRTaskIds) { + if(samplingSize<=0) { + break; + } + String rawMRTaskLog = _mrClient.getMRTaskLog(mrJobId, failedMRTaskId); + HadoopException mrTaskLevelException = analyzeMRTask(failedMRTaskId, rawMRTaskLog); + childExceptions.add(mrTaskLevelException); + + samplingSize--; + } + + mrJobLevelException.setChildExceptions(childExceptions); + mrJobLevelException.setLoggingEvent(analyzedLog.getException()); + mrJobLevelException.setType(HadoopException.HadoopExceptionType.MRJOB); + mrJobLevelException.setId(mrJobId); + return mrJobLevelException; + } + + /** + * Given a failed MR Task id and diagnostics of the task, this method analyzes it and returns a HadoopException object which captures all the exception in the given MR task. + * @param mrTaskId The task id of the map reduce job + * @param rawMRTaskLog Raw map-reduce log + * @return HadoopException object which captures all the exceptions in the given Mapreduce task + */ + private HadoopException analyzeMRTask(String mrTaskId, String rawMRTaskLog) { + HadoopException mrTaskLevelException = new HadoopException(); + MRTaskLogAnalyzer analyzedLog = new MRTaskLogAnalyzer(rawMRTaskLog); + mrTaskLevelException.setLoggingEvent(analyzedLog.getException()); + mrTaskLevelException.setType(HadoopException.HadoopExceptionType.MRTASK); + mrTaskLevelException.setId(mrTaskId); + mrTaskLevelException.setChildExceptions(null); + return mrTaskLevelException; + } + + /** + * Returns the Hadoop Exception object + * @return Returns the Hadoop Exception object + */ + public HadoopException getExceptions() { + return this._exception; + } +} diff --git a/app/com/linkedin/drelephant/exceptions/HadoopException.java b/app/com/linkedin/drelephant/exceptions/HadoopException.java new file mode 100644 index 000000000..6dce11208 --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/HadoopException.java @@ -0,0 +1,77 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
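A consumer of getExceptions() receives the root of a flow/job/task tree, so a short recursive walk prints the whole hierarchy. A sketch that assumes the HadoopException accessors defined in the class added just below:

    import com.linkedin.drelephant.exceptions.HadoopException;

    public class HadoopExceptionPrinter {
      /** Prints each node of the exception tree, indented by depth. */
      public static void print(HadoopException exception, int depth) {
        StringBuilder indent = new StringBuilder();
        for (int i = 0; i < depth; i++) {
          indent.append("  ");
        }
        System.out.println(indent.toString() + exception.getType() + " " + exception.getId());
        if (exception.getChildExceptions() != null) {
          // The child list is declared raw in this patch, hence the explicit cast
          for (Object child : exception.getChildExceptions()) {
            print((HadoopException) child, depth + 1);
          }
        }
      }
    }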
+ */ + +package com.linkedin.drelephant.exceptions; + +import org.apache.log4j.Logger; + +import java.util.List; + + +public class HadoopException { + private final Logger logger = Logger.getLogger(HadoopException.class); + private String _id = "UNKNOWN"; + public enum HadoopExceptionType {FLOW, SCHEDULER, SCRIPT, MR, KILL, MRJOB, MRTASK} + /** + * FLOW: HadoopException object for Azkaban flow + * SCHEDULER : HadoopException object for Azkaban job with Azkaban level failure + * SCRIPT : HadoopException object for Azkaban job with Script level failure + * MR: HadoopException object for Azkaban job with MR level failure + * KILL: HadoopException object for killed Azkaban job + * MRJOB: HadoopException object for MR Job + * MRTASK: HadoopException object for MR Task + * */ + + private HadoopExceptionType _type; + private LoggingEvent _loggingEvent; + private List _childExceptions; + + public String getId() { + return _id; + } + + public void setId(String id) { + _id = id; + } + + public HadoopExceptionType getType() { + return _type; + } + + public void setType(HadoopExceptionType type) { + _type = type; + } + + public LoggingEvent getLoggingEvent() { + return _loggingEvent; + } + + public void setLoggingEvent(LoggingEvent e) { + _loggingEvent = e; + } + + public List getChildExceptions() { + return _childExceptions; + } + + public void setChildExceptions(List childExceptions) { + _childExceptions = childExceptions; + } + + +} + + diff --git a/app/com/linkedin/drelephant/exceptions/JobState.java b/app/com/linkedin/drelephant/exceptions/JobState.java new file mode 100644 index 000000000..2a034daee --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/JobState.java @@ -0,0 +1,24 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +/** + * This enum represents the state of the job + */ +public enum JobState { + SCRIPTFAIL, SCHEDULERFAIL, MRFAIL, SUCCEEDED, KILLED +} diff --git a/app/com/linkedin/drelephant/exceptions/LoggingEvent.java b/app/com/linkedin/drelephant/exceptions/LoggingEvent.java new file mode 100644 index 000000000..b1bf7e634 --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/LoggingEvent.java @@ -0,0 +1,124 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
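A quick harness keeps the regex behavior of the class added next honest. A sketch whose expected output was derived by tracing the patterns by hand; it is worth turning into a real unit test:

    import com.linkedin.drelephant.exceptions.LoggingEvent;

    public class LoggingEventDemo {
      public static void main(String[] args) {
        String trace = "java.lang.RuntimeException: boom\n"
            + "\tat Foo.bar(Foo.java:1)\n";
        LoggingEvent event = new LoggingEvent(trace);
        // A single chain element: one exception of two lines
        System.out.println(event.getLog().size()); // expect 1
      }
    }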
+ */
+
+package com.linkedin.drelephant.exceptions;
+
+import org.apache.log4j.Logger;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+public class LoggingEvent {
+
+  private final Logger logger = Logger.getLogger(LoggingEvent.class);
+  private List<String> _rawLog;
+  private String _log; // To do
+  private long _timestamp; // To do: Get time from logs and fill this field
+  private enum LoggingLevel {DEBUG, INFO, WARNING, ERROR, FATAL}
+  private LoggingLevel _level = LoggingLevel.ERROR; // For now this is fixed to ERROR
+  private String _message;
+  private List<EventException> _exceptionChain;
+
+  public LoggingEvent(String exceptionChainString) {
+    this._rawLog = exceptionChainStringToListOfExceptions(exceptionChainString);
+    setExceptionChain();
+    setMessage();
+  }
+
+  /**
+   * @return Returns the exception chain in the form of a list of lists of strings.
+   * A list of strings corresponds to an exception in the exception chain;
+   * a string corresponds to a line in an exception
+   */
+  public List<List<String>> getLog() {
+    List<List<String>> log = new ArrayList<List<String>>();
+    for (String exceptionString : _rawLog) {
+      List<String> exception = exceptionStringToListOfLines(exceptionString);
+      log.add(exception);
+    }
+    return log;
+  }
+
+  private void setExceptionChain() {
+    List<EventException> exceptionChain = new ArrayList<EventException>();
+    int index = 0;
+
+    for (String rawEventException : _rawLog) {
+      EventException eventException = new EventException(index, rawEventException);
+      exceptionChain.add(eventException);
+      index += 1;
+    }
+    _exceptionChain = exceptionChain;
+  }
+
+  /**
+   * Converts an exception chain string to a list of exception strings
+   * @param s Exception chain in a string
+   * @return List of exceptions in the given exception chain
+   */
+  private List<String> exceptionChainStringToListOfExceptions(String s) {
+    List<String> chain = new ArrayList<String>();
+    Pattern stackTraceCausedByClause = Pattern.compile(".*^(?!Caused by).+\\n(?:.*\\tat.+\\n)+");
+    Pattern stackTraceOtherThanCausedByClause = Pattern.compile(".*Caused by.+\\n(?:.*\\n)?(?:.*\\s+at.+\\n)*");
+
+    Matcher matcher = stackTraceCausedByClause.matcher(s);
+    while (matcher.find()) {
+      chain.add(matcher.group());
+    }
+    matcher = stackTraceOtherThanCausedByClause.matcher(s);
+    while (matcher.find()) {
+      chain.add(matcher.group());
+    }
+
+    if (chain.isEmpty()) {
+      // error logs other than stack traces, e.g. logs of an Azkaban level failure in an Azkaban job
+      chain.add(s);
+    }
+    return chain;
+  }
+
+  /**
+   * Converts an exception string to a list of strings corresponding to the lines in the exception
+   * @param s Exception in a single string
+   * @return List of individual lines in the string
+   */
+  private List<String> exceptionStringToListOfLines(String s) {
+    List<String> exception = new ArrayList<String>();
+    Matcher matcher = Pattern.compile(".*\\n").matcher(s);
+    while (matcher.find()) {
+      exception.add(matcher.group());
+    }
+    return exception;
+  }
+
+  /**
+   * Sets the message for the logging event.
+   * For now, it is set to the message field of the first EventException in _exceptionChain.
+   * This can be changed depending on which EventException's message is most relevant for the user to see.
+   */
+  private void setMessage() {
+    if (!_exceptionChain.isEmpty()) {
+      this._message = _exceptionChain.get(0).getMessage();
+    }
+  }
+
+}
diff --git a/app/com/linkedin/drelephant/exceptions/MRClient.java b/app/com/linkedin/drelephant/exceptions/MRClient.java
new file mode 100644
index 000000000..8ef75c250
--- /dev/null
+++ b/app/com/linkedin/drelephant/exceptions/MRClient.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.exceptions;
+
+import com.linkedin.drelephant.security.HadoopSecurity;
+import java.security.PrivilegedAction;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.log4j.Logger;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+
+/**
+ * Client to interact with the job history server and get the mapreduce logs
+ **/
+public class MRClient {
+  private static final Logger logger = Logger.getLogger(MRClient.class);
+  final String jhistoryAddr = new Configuration().get("mapreduce.jobhistory.webapp.address");
+  private AuthenticatedURL.Token _token;
+  private AuthenticatedURL _authenticatedURL;
+
+  public MRClient() {
+    _token = new AuthenticatedURL.Token();
+    _authenticatedURL = new AuthenticatedURL();
+  }
+
+  /**
+   * For a given REST url, fetches and returns the JsonNode
+   *
+   * @param url REST job history server url
+   * @return Json node to which the url points
+   */
+  private JsonNode fetchJson(final URL url)
+      throws IOException {
+    try {
+      ObjectMapper objectMapper = new ObjectMapper();
+      HttpURLConnection conn = _authenticatedURL.openConnection(url, _token);
+      return objectMapper.readTree(conn.getInputStream());
+    } catch (AuthenticationException e) {
+      logger.error(String.format("Cannot authenticate in MR Client %s", e.getMessage()));
+    } catch (IOException e) {
+      logger.error(String.format("Error reading stream in MR Client %s", e.getMessage()));
+    }
+    return null;
+  }
+
+  /**
+   * Returns the diagnostics for a given MR Job Id
+   * @param mrJobId MR Job Id
+   * @return Diagnostics in a string format
+   */
+  public String getMRJobLog(String mrJobId) {
+    String mrJobHistoryURL = "http://" + jhistoryAddr + "/ws/v1/history/mapreduce/jobs/" + mrJobId;
+    try {
+      JsonNode response = fetchJson(new URL(mrJobHistoryURL));
+      // Compare the state by value; only unsuccessful jobs carry useful diagnostics
+      if (!"SUCCEEDED".equals(response.get("job").get("state").getTextValue())) {
+        return response.get("job").get("diagnostics").getTextValue();
+      }
+    } catch (MalformedURLException e) {
+      logger.error(String.format("Malformed URL %s in MR Client: %s ", mrJobHistoryURL, e.getMessage()));
+    } catch (NullPointerException e) {
+      logger.error(String.format("Invalid response %s", e.getMessage()));
+    } catch (IOException e) {
+      logger.error(String.format("IOException in MR Client: %s", e.getMessage()));
+    }
+    return null;
+  }
+
+  /**
+   * Returns the last task attempt diagnostic for a given failed taskId
+   *
+   * @param mrJobId MR Job Id
+   * @param mrTaskId MRTask Id
+   * @return Diagnostic in a string format
+   */
+  public String getMRTaskLog(String mrJobId, String mrTaskId) {
+    String mrTaskHistoryURL =
+        "http://" + jhistoryAddr + 
"/ws/v1/history/mapreduce/jobs/" + mrJobId + "/tasks/" + mrTaskId + "/attempts"; + ; + try { + JsonNode response = fetchJson(new URL(mrTaskHistoryURL)); + int attempts = response.get("taskAttempts").get("taskAttempt").size(); + int maxattempt = 0; + int maxattemptid = 0; + for (int i = 0; i < attempts; i++) { + int attempt = Integer + .parseInt(response.get("taskAttempts").get("taskAttempt").get(i).get("id").getTextValue().split("_")[5]); + if (attempt > maxattempt) { + maxattemptid = i; + maxattempt = attempt; + } + } + return response.get("taskAttempts").get("taskAttempt").get(maxattemptid).get("diagnostics").getTextValue(); + } catch (MalformedURLException e) { + logger.error(e.toString()); + } catch (IOException e) { + logger.error(e.toString()); + } + return null; + } +} diff --git a/app/com/linkedin/drelephant/exceptions/MRJobLogAnalyzer.java b/app/com/linkedin/drelephant/exceptions/MRJobLogAnalyzer.java new file mode 100644 index 000000000..17ef247b3 --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/MRJobLogAnalyzer.java @@ -0,0 +1,87 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +import org.apache.log4j.Logger; + +import java.util.HashSet; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +/** +* Given a MR Job log, sets the list of unsuccessful tasks and MR job level exception (if any) +*/ + +public class MRJobLogAnalyzer { + private static final Logger logger = Logger.getLogger(MRJobLogAnalyzer.class); + + private Pattern _mrJobExceptionPattern = + Pattern.compile(".*\\n(?:.*\\tat.+\\n)+(?:.*Caused by.+\\n(?:.*\\n)?(?:.*\\s+at.+\\n)*)*"); + private Pattern _unsuccessfulMRTaskIdPattern = + Pattern.compile("Task (?:failed) (task_[0-9]+_[0-9]+_[mr]_[0-9]+)"); + private LoggingEvent _exception; + private Set _failedSubEvents; + + public MRJobLogAnalyzer(String rawLog) { + setFailedSubEvents(rawLog); + setException(rawLog); + } + + /** + * Given MR Job log, finds the list of unsuccessful tasks and sets it equal to _failedSubEvents + * @param rawLog MR Job log in a string + */ + private void setFailedSubEvents(String rawLog) { + Set failedSubEvents = new HashSet(); + Matcher unsuccessfulMRTaskIdMatcher = _unsuccessfulMRTaskIdPattern.matcher(rawLog); + while (unsuccessfulMRTaskIdMatcher.find()) { + failedSubEvents.add(unsuccessfulMRTaskIdMatcher.group(1)); + } + this._failedSubEvents = failedSubEvents; + } + + /** + * Given MR Job log, finds the MR Job level exception and sets it equal to _exception + * @param rawLog MR Job log in a string + */ + private void setException(String rawLog) { + Matcher mrJobExceptionMatcher = _mrJobExceptionPattern.matcher(rawLog); + if (mrJobExceptionMatcher.find()) { + this._exception = new LoggingEvent(mrJobExceptionMatcher.group()); + } + } + + /** + * Returns the list of unsuccessful tasks in given MR Job log + * @return list of unsuccessful tasks in MR Job log + */ + public Set 
getFailedSubEvents() {
+    return this._failedSubEvents;
+  }
+
+  /**
+   * Returns the MR Job level exception
+   * @return _exception of type LoggingEvent.
+   */
+  public LoggingEvent getException() {
+    return this._exception;
+  }
+
+
+}
\ No newline at end of file
diff --git a/app/com/linkedin/drelephant/exceptions/MRTaskLogAnalyzer.java b/app/com/linkedin/drelephant/exceptions/MRTaskLogAnalyzer.java
new file mode 100644
index 000000000..13f63ac19
--- /dev/null
+++ b/app/com/linkedin/drelephant/exceptions/MRTaskLogAnalyzer.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.exceptions;
+
+import org.apache.log4j.Logger;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+/**
+ * Given an MR Task log, sets the exception (if any) found in the log
+ */
+public class MRTaskLogAnalyzer {
+  private static final Logger logger = Logger.getLogger(MRTaskLogAnalyzer.class);
+  private LoggingEvent _exception;
+  private static final long MAX_EXCEPTIONS = 5;
+  private Pattern mrTaskExceptionPattern =
+      Pattern.compile("Error: (.*\\n(?:.*\\tat.+\\n)+(?:.*Caused by.+\\n(?:.*\\n)?(?:.*\\s+at.+\\n)*)*)");
+
+  public MRTaskLogAnalyzer(String rawLog) {
+    setException(rawLog);
+  }
+
+  /**
+   * Gets the exception of the MR task
+   * @return The LoggingEvent corresponding to the exception
+   */
+  public LoggingEvent getException() {
+    return this._exception;
+  }
+
+  /**
+   * Sets the exception of the MR task
+   * @param rawLog Raw log of the task
+   */
+  private void setException(String rawLog) {
+    Matcher matcher = mrTaskExceptionPattern.matcher(rawLog);
+    long limitOnExceptionChains = MAX_EXCEPTIONS;
+    StringBuilder exceptionBuilder = new StringBuilder();
+    // Collect at most MAX_EXCEPTIONS exception chains
+    while (matcher.find() && limitOnExceptionChains > 0) {
+      exceptionBuilder.append(matcher.group());
+      limitOnExceptionChains--;
+    }
+    this._exception = new LoggingEvent(exceptionBuilder.toString());
+  }
+}
\ No newline at end of file
diff --git a/app/com/linkedin/drelephant/exceptions/StackTraceFrame.java b/app/com/linkedin/drelephant/exceptions/StackTraceFrame.java
new file mode 100644
index 000000000..00cef2f88
--- /dev/null
+++ b/app/com/linkedin/drelephant/exceptions/StackTraceFrame.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
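MRTaskLogAnalyzer above, like AzkabanJobLogAnalyzer later in this patch, caps how many regex matches it concatenates; the idiom extracts cleanly into a helper. A sketch, illustrative rather than part of this diff:

    import java.util.regex.Matcher;

    public class MatcherUtilsSketch {
      /** Concatenates at most maxMatches matches from the given matcher. */
      public static String appendFirstMatches(Matcher matcher, int maxMatches) {
        StringBuilder builder = new StringBuilder();
        int remaining = maxMatches;
        while (matcher.find() && remaining > 0) {
          builder.append(matcher.group());
          remaining--;
        }
        return builder.toString();
      }
    }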
+ */ + +package com.linkedin.drelephant.exceptions; + +import org.apache.log4j.Logger; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +public class StackTraceFrame { + + private final Logger logger = Logger.getLogger(StackTraceFrame.class); + private String _source; + private String _fileName; + private int _lineNumber; + private String _call; + private boolean _nativeMethod; + private int _index; + + public StackTraceFrame(int index, String source, String call, String fileDetails) { + this._source = source; + this._call = call; + this._index = index; + getFileDetails(fileDetails); + } + + private void getFileDetails(String fileDetails) { + boolean nativeMethod = false; + String fileName = fileDetails; + String lineNumber = "0"; + Pattern file = Pattern.compile("(.*):(.*)"); + + /** + Example string: 'Assert.java:89' + matches: ['Assert.java', '89'] + */ + + if (fileDetails.equals("Native Method")) { + nativeMethod = true; + } else { + Matcher match = file.matcher(fileDetails); + if (match.find()) { + fileName = match.group(1); + lineNumber = match.group(2); + } + } + this._fileName = fileName; + this._lineNumber = Integer.parseInt(lineNumber); // To do: Can throw parseException + this._nativeMethod = nativeMethod; + } +} diff --git a/app/com/linkedin/drelephant/exceptions/WorkflowClient.java b/app/com/linkedin/drelephant/exceptions/WorkflowClient.java new file mode 100644 index 000000000..ab2020fb9 --- /dev/null +++ b/app/com/linkedin/drelephant/exceptions/WorkflowClient.java @@ -0,0 +1,78 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +import java.io.File; +import java.util.Map; +import java.util.Set; + + +/** + * The interface WorkflowClient should be implemented by all the workflow client. The client should not + * be confused with the a client of the scheduler since the context of this client is limited to a workflow + * and it doesn't operate at a scheduler level. + */ +public interface WorkflowClient { + + /** + * Login to the scheduler using the username and the password + * @param username The username of the user + * @param password The password of the user + */ + public void login(String username, String password); + + /** + * Login to the scheduler using the username and the private key + * @param username The username of the user + * @param privateKey The private key of the user + */ + public void login(String username, File privateKey); + + /** + * Return all the jobs in the workflow. It returns a Map where the key \n + * is the execution id of the job and the value is the status of the job. + * @return Return all the jobs in the workflow + */ + public Map getJobsFromFlow(); + + /** + * Given a job id, this method analyzes the job + * @param jobId The execution id of the job + */ + public void analyzeJob(String jobId); + + /** + * This method extracts out all the yarn applications from the job and returns the set of them. 
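The fileDetails parsing in StackTraceFrame above splits on the last ':' (the first group is greedy) and special-cases native frames; a tiny trace makes that concrete. A standalone re-statement of the same logic:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class FileDetailsDemo {
      public static void main(String[] args) {
        Pattern file = Pattern.compile("(.*):(.*)");
        for (String details : new String[]{"Assert.java:89", "Native Method"}) {
          if (details.equals("Native Method")) {
            System.out.println(details + " -> native frame, line 0");
          } else {
            Matcher match = file.matcher(details);
            if (match.find()) {
              // Prints: Assert.java:89 -> file Assert.java, line 89
              System.out.println(details + " -> file " + match.group(1) + ", line " + match.group(2));
            }
          }
        }
      }
    }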
+   * @param jobId The job id of the job.
+   * @return The set of all the yarn applications spawned by the job
+   */
+  public Set<String> getYarnApplicationsFromJob(String jobId);
+
+  /**
+   * Returns the job state of the job.
+   * @param jobId The id of the job
+   * @return Returns the state of the job
+   */
+  public JobState getJobState(String jobId);
+
+  /**
+   * Get the exception, given a job id
+   * @param jobId The id of the job
+   * @return The exception encountered
+   */
+  public LoggingEvent getJobException(String jobId);
+}
diff --git a/app/com/linkedin/drelephant/exceptions/azkaban/AzkabanJobLogAnalyzer.java b/app/com/linkedin/drelephant/exceptions/azkaban/AzkabanJobLogAnalyzer.java
new file mode 100644
index 000000000..71ab0b742
--- /dev/null
+++ b/app/com/linkedin/drelephant/exceptions/azkaban/AzkabanJobLogAnalyzer.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.exceptions.azkaban;
+
+import com.linkedin.drelephant.exceptions.JobState;
+import com.linkedin.drelephant.exceptions.LoggingEvent;
+import java.util.LinkedHashSet;
+import org.apache.log4j.Logger;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+/*
+ * Given an Azkaban job log, returns the Azkaban job state, the list of all MR job ids in the log,
+ * and the exception (if any) at the Azkaban job level
+ */
+public class AzkabanJobLogAnalyzer {
+
+  private static final Logger logger = Logger.getLogger(AzkabanJobLogAnalyzer.class);
+  private Pattern _successfulAzkabanJobPattern =
+      Pattern.compile("Finishing job [^\\s]+ attempt: [0-9]+ at [0-9]+ with status SUCCEEDED");
+  private Pattern _failedAzkabanJobPattern =
+      Pattern.compile("Finishing job [^\\s]+ attempt: [0-9]+ at [0-9]+ with status FAILED");
+  private Pattern _killedAzkabanJobPattern =
+      Pattern.compile("Finishing job [^\\s]+ attempt: [0-9]+ at [0-9]+ with status KILLED");
+  private Pattern _scriptFailPattern = Pattern.compile("ERROR - Job run failed!");
+  // Alternate pattern: (".+\\n(?:.+\\tat.+\\n)+(?:.+Caused by.+\\n(?:.*\\n)?(?:.+\\s+at.+\\n)*)*");
+  private Pattern _scriptOrMRFailExceptionPattern = Pattern.compile("(Caused by.+\\n(?:.*\\n)?((?:.+\\s+at.+\\n)*))+");
+  private Pattern _azkabanFailExceptionPattern = Pattern.compile(
+      "\\d{2}[-/]\\d{2}[-/]\\d{4} \\d{2}:\\d{2}:\\d{2} (PST|PDT) [^\\s]+ (?:ERROR|WARN|FATAL|Exception) .*\\n");
+  private Pattern _mrJobIdPattern = Pattern.compile("job_[0-9]+_[0-9]+");
+  private Pattern _mrPigJobIdPattern = Pattern.compile("job job_[0-9]+_[0-9]+ has failed!");
+  private Pattern _mrHiveJobIdPattern = Pattern.compile("ERROR Ended Job = job_[0-9]+_[0-9]+ with errors");
+  private static final long SAMPLING_SIZE = 5;
+
+  /**
+   * Failure in an Azkaban job log is broadly categorized into three categories:
+   * SCHEDULERFAIL: Failure at the Azkaban level
+   * SCRIPTFAIL: Failure at the script level
+   * MRFAIL: Failure at the mapreduce level
+   */
+  private JobState 
_state; + private LoggingEvent _exception; + private Set _subEvents; + private String _rawLog; + + public AzkabanJobLogAnalyzer(String rawLog) { + this._rawLog = rawLog; + setSubEvents(); + analyzeLog(); + } + + /** + * Analyzes the log to find the level of exception + */ + private void analyzeLog() { + if (_successfulAzkabanJobPattern.matcher(_rawLog).find()) { + succeededAzkabanJob(); + } else if (_failedAzkabanJobPattern.matcher(_rawLog).find()) { + if (!_subEvents.isEmpty()) { + mrLevelFailedAzkabanJob(); + } else if (_scriptFailPattern.matcher(_rawLog).find()) { + scriptLevelFailedAzkabanJob(); + } else { + azkabanLevelFailedAzkabanJob(); + } + } else if (_killedAzkabanJobPattern.matcher(_rawLog).find()) { + killedAzkabanJob(); + } + } + + /** + * Sets the _state and _exception for Succeeded Azkaban job + */ + private void succeededAzkabanJob() { + this._state = JobState.SUCCEEDED; + this._exception = null; + } + + /** + * Sets _state and _exception for Azkaban job which failed at the MR Level + */ + private void mrLevelFailedAzkabanJob() { + this._state = JobState.MRFAIL; + Matcher matcher = _scriptOrMRFailExceptionPattern.matcher(_rawLog); + StringBuilder exceptionBuilder = new StringBuilder(); + long limit = SAMPLING_SIZE; + while (matcher.find() && limit > 0) { + limit--; + exceptionBuilder.append(matcher.group()); + } + this._exception = new LoggingEvent(exceptionBuilder.toString()); + } + + /** + * Set _state and _exception for Azkaban job which failed at the Script Level + */ + private void scriptLevelFailedAzkabanJob() { + this._state = JobState.SCRIPTFAIL; + Matcher matcher = _scriptOrMRFailExceptionPattern.matcher(_rawLog); + StringBuilder exceptionBuilder = new StringBuilder(); + long limit = SAMPLING_SIZE; + while (matcher.find() && limit > 0) { + limit--; + exceptionBuilder.append(matcher.group()); + } + this._exception = new LoggingEvent(exceptionBuilder.toString()); + } + + /** + * Set _state and _exception for Azkaban job which failed at the Azkaban Level + */ + private void azkabanLevelFailedAzkabanJob() { + this._state = JobState.SCHEDULERFAIL; + Matcher matcher = _azkabanFailExceptionPattern.matcher(_rawLog); + if (matcher.find()) { + this._exception = new LoggingEvent(matcher.group()); + } + } + + /** + * Set _state and _exception for killed Azkaban job + */ + private void killedAzkabanJob() { + this._state = JobState.KILLED; + this._exception = null; + } + + /** + * @return returns Azkaban job state + */ + public JobState getState() { + return this._state; + } + + /** + * @return returns list of MR Job Ids in the given Azkaban job log + */ + public Set getSubEvents() { + return this._subEvents; + } + + /** + * Sets _subEvents equal to the list of mr job ids in the given Azkaban job log + */ + private void setSubEvents() { + Set subEvents = new LinkedHashSet(); + + // check for pig jobs + Matcher pigJobMatcher = _mrPigJobIdPattern.matcher(_rawLog); + while (pigJobMatcher.find()) { + String pigJobFailedString = pigJobMatcher.group(); + Matcher jobIdMatcher = _mrJobIdPattern.matcher(pigJobFailedString); + if (jobIdMatcher.find()) { + subEvents.add(jobIdMatcher.group()); + this._subEvents = subEvents; + return; + } + } + + pigJobMatcher.reset(); + + // check for hive jobs + Matcher hiveJobMatcher = _mrHiveJobIdPattern.matcher(_rawLog); + while (hiveJobMatcher.find()) { + String hiveJobFailedString = hiveJobMatcher.group(); + Matcher jobIdMatcher = _mrJobIdPattern.matcher(hiveJobFailedString); + if (jobIdMatcher.find()) { + subEvents.add(jobIdMatcher.group()); + 
diff --git a/app/com/linkedin/drelephant/exceptions/azkaban/AzkabanWorkflowClient.java b/app/com/linkedin/drelephant/exceptions/azkaban/AzkabanWorkflowClient.java
new file mode 100644
index 000000000..808fa641a
--- /dev/null
+++ b/app/com/linkedin/drelephant/exceptions/azkaban/AzkabanWorkflowClient.java
@@ -0,0 +1,456 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.exceptions.azkaban;
+
+import com.linkedin.drelephant.exceptions.JobState;
+import com.linkedin.drelephant.exceptions.LoggingEvent;
+import com.linkedin.drelephant.exceptions.WorkflowClient;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.security.InvalidKeyException;
+import java.security.KeyFactory;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.UnrecoverableKeyException;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import javax.crypto.BadPaddingException;
+import javax.crypto.Cipher;
+import javax.crypto.IllegalBlockSizeException;
+import javax.crypto.NoSuchPaddingException;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.ClientProtocolException;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.utils.URLEncodedUtils;
+import org.apache.http.conn.scheme.Scheme;
+import org.apache.http.conn.ssl.SSLSocketFactory;
+import org.apache.http.conn.ssl.TrustStrategy;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
+import org.apache.log4j.Logger;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+
+/**
+ * Client to interact with Azkaban and get information about the workflow
+ */
+public class AzkabanWorkflowClient implements WorkflowClient {
+
+  private final Logger logger = Logger.getLogger(AzkabanWorkflowClient.class);
+
+  private String _workflowExecutionUrl;
+  private String _azkabanUrl;
+  private String _executionId;
+  private String _sessionId;
+  private String _username;
+  private String _password;
+
+  private String AZKABAN_LOG_OFFSET = "0";
+  private String AZKABAN_LOG_LENGTH_LIMIT = "9999999"; // cap the fetched log length at ~10 MB
+
+  Map<String, AzkabanJobLogAnalyzer> jobIdToLog;
+
+  /**
+   * Constructor for AzkabanWorkflowClient
+   * @param url The url of the workflow
+   * @throws URISyntaxException
+   * @throws MalformedURLException
+   */
+  public AzkabanWorkflowClient(String url)
+      throws URISyntaxException, MalformedURLException {
+    if (url == null || url.isEmpty()) {
+      throw new MalformedURLException("The Azkaban url is missing or malformed");
+    }
+    this.setAzkabanServerUrl(url);
+    this.setExecutionId(url);
+    this._workflowExecutionUrl = url;
+    this.jobIdToLog = new HashMap<String, AzkabanJobLogAnalyzer>();
+  }
+
+  /**
+   * Sets the Azkaban server url given the Azkaban workflow url
+   * @param azkabanWorkflowUrl The Azkaban workflow url
+   * @throws MalformedURLException
+   * @throws URISyntaxException
+   */
+  private void setAzkabanServerUrl(String azkabanWorkflowUrl)
+      throws MalformedURLException, URISyntaxException {
+    this._azkabanUrl = "https://" + new URL(azkabanWorkflowUrl).getAuthority();
+  }
+
+  /**
+   * Sets the workflow execution id given the Azkaban workflow url
+   * @param azkabanWorkflowUrl The url of the Azkaban workflow
+   * @throws MalformedURLException
+   * @throws URISyntaxException
+   */
+  private void setExecutionId(String azkabanWorkflowUrl)
+      throws MalformedURLException, URISyntaxException {
+    List<NameValuePair> params = URLEncodedUtils.parse(new URI(azkabanWorkflowUrl), "UTF-8");
+    for (NameValuePair param : params) {
+      // Compare with equals(); == on Strings compares references, not values.
+      if ("execid".equals(param.getName())) {
+        this._executionId = param.getValue();
+      }
+    }
+  }
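For reference, this is how the execid extraction behaves on a hypothetical workflow URL; note that the comparison must use equals(), since == on Strings only compares references:

import java.net.URI;
import java.util.List;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;

public class ExecIdDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical workflow URL of the shape the constructor expects.
    URI url = new URI("https://azkaban.example.com:8443/executor?execid=84624");
    List<NameValuePair> params = URLEncodedUtils.parse(url, "UTF-8");
    for (NameValuePair p : params) {
      if ("execid".equals(p.getName())) {
        System.out.println(p.getValue()); // prints 84624
      }
    }
  }
}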
+  /**
+   * Login using a private key
+   * @param username The username of the user
+   * @param _privateKey The path of the private key of the user
+   */
+  @Override
+  public void login(String username, File _privateKey) {
+    String headlessChallenge = null;
+    String decodedPwd = null;
+    try {
+      headlessChallenge = getHeadlessChallenge(username);
+      decodedPwd = decodeHeadlessChallenge(headlessChallenge, _privateKey);
+    } catch (Exception e) {
+      logger.error("Unexpected error encountered while decoding headless challenge "
+          + headlessChallenge + " " + e.toString());
+    }
+    login(username, decodedPwd);
+  }
+
+  /**
+   * Authenticates Dr. Elephant in Azkaban and sets the sessionId
+   *
+   * @param userName The username of the user
+   * @param password The password of the user
+   */
+  @Override
+  public void login(String userName, String password) {
+    this._username = userName;
+    this._password = password;
+    List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
+    urlParameters.add(new BasicNameValuePair("action", "login"));
+    urlParameters.add(new BasicNameValuePair("username", userName));
+    urlParameters.add(new BasicNameValuePair("password", password));
+
+    try {
+      JSONObject jsonObject = fetchJson(urlParameters, _workflowExecutionUrl);
+      if (!jsonObject.has("session.id")) {
+        throw new RuntimeException("Login attempt failed. The session ID could not be obtained.");
+      }
+      this._sessionId = jsonObject.get("session.id").toString();
+    } catch (JSONException e) {
+      e.printStackTrace();
+    }
+  }
+  /**
+   * Makes a REST API call for the given url parameters and returns the JSON object
+   * in the response body
+   *
+   * @param urlParameters The form parameters to send with the request
+   * @param azkabanUrl The Azkaban url to post to
+   * @return JSON object in the response body
+   */
+  private JSONObject fetchJson(List<NameValuePair> urlParameters, String azkabanUrl) {
+    HttpPost httpPost = new HttpPost(azkabanUrl);
+    try {
+      httpPost.setEntity(new UrlEncodedFormEntity(urlParameters, "UTF-8"));
+    } catch (UnsupportedEncodingException e) {
+      e.printStackTrace();
+    }
+    httpPost.setHeader("Accept", "*/*");
+    httpPost.setHeader("Content-Type", "application/x-www-form-urlencoded");
+
+    HttpClient httpClient = new DefaultHttpClient();
+    JSONObject jsonObj = null;
+    try {
+      // Note: this socket factory trusts every certificate, self-signed ones included.
+      SSLSocketFactory socketFactory = new SSLSocketFactory(new TrustStrategy() {
+        @Override
+        public boolean isTrusted(X509Certificate[] x509Certificates, String s)
+            throws CertificateException {
+          return true;
+        }
+      });
+
+      Scheme scheme = new Scheme("https", 443, socketFactory);
+      httpClient.getConnectionManager().getSchemeRegistry().register(scheme);
+      HttpResponse response = httpClient.execute(httpPost);
+
+      if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
+        throw new RuntimeException(
+            response.getStatusLine().toString() + "\nStatus code: " + response.getStatusLine().getStatusCode());
+      }
+
+      String result = parseContent(response.getEntity().getContent());
+      try {
+        jsonObj = new JSONObject(result);
+        if (jsonObj.has("error")) {
+          throw new RuntimeException(jsonObj.get("error").toString());
+        }
+      } catch (JSONException e) {
+        e.printStackTrace();
+      }
+    } catch (ClientProtocolException e) {
+      e.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (NoSuchAlgorithmException e) {
+      e.printStackTrace();
+    } catch (UnrecoverableKeyException e) {
+      e.printStackTrace();
+    } catch (KeyManagementException e) {
+      e.printStackTrace();
+    } catch (KeyStoreException e) {
+      e.printStackTrace();
+    } finally {
+      httpClient.getConnectionManager().shutdown();
+    }
+    return jsonObj;
+  }
+
+  /**
+   * Parses the content of the given input stream into a String
+   * @param response The input stream
+   * @return The string parsed from the given input stream
+   * @throws IOException Thrown if the input stream cannot be parsed into a string
+   */
+  private String parseContent(InputStream response)
+      throws IOException {
+    BufferedReader reader = null;
+    StringBuilder result = new StringBuilder();
+    try {
+      reader = new BufferedReader(new InputStreamReader(response));
+
+      String line = null;
+      while ((line = reader.readLine()) != null) {
+        result.append(line);
+      }
+      return result.toString();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } finally {
+      if (reader != null) {
+        reader.close();
+      }
+    }
+    return result.toString();
+  }
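On Java 7 and later, the same stream-draining logic could be written with try-with-resources so the reader is always closed and IOExceptions are not swallowed; this is only a sketch of an alternative, not part of the patch:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

final class StreamUtil {
  // Reads the stream line by line; the reader is closed automatically on exit.
  static String parseContent(InputStream response) throws IOException {
    StringBuilder result = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(response))) {
      String line;
      while ((line = reader.readLine()) != null) {
        result.append(line);
      }
    }
    return result.toString();
  }
}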
+  /**
+   * Fetches the headless challenge (the RSA-encrypted password) for the given user from Azkaban
+   * @param username The username of the user
+   * @return Encoded password of the user
+   * @throws IOException
+   */
+  private String getHeadlessChallenge(String username)
+      throws IOException {
+
+    CloseableHttpClient httpClient = HttpClientBuilder.create().build();
+    String encodedPassword = null;
+
+    try {
+      String userUrl = _azkabanUrl + "/restli/liuser?action=headlessChallenge";
+      HttpPost request = new HttpPost(userUrl);
+      StringEntity params = new StringEntity("{\"username\":\"" + username + "\"}");
+      request.addHeader("content-type", "application/json");
+      request.setEntity(params);
+      HttpResponse response = httpClient.execute(request);
+      String responseString = EntityUtils.toString(response.getEntity());
+      JSONObject jobject = new JSONObject(responseString);
+      encodedPassword = jobject.getString("value");
+    } catch (Exception ex) {
+      throw new RuntimeException("Unexpected exception in decoding headless account " + ex.toString());
+    } finally {
+      // Close in finally without returning here, so the exception above can propagate.
+      httpClient.close();
+    }
+    return encodedPassword;
+  }
+
+  /**
+   * Decodes the encoded password using the _privateKey
+   * @param encodedPassword
+   * @param _privateKey
+   * @return The decoded password
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   * @throws InvalidKeySpecException
+   * @throws NoSuchPaddingException
+   * @throws InvalidKeyException
+   * @throws IllegalBlockSizeException
+   * @throws BadPaddingException
+   */
+  private String decodeHeadlessChallenge(String encodedPassword, File _privateKey)
+      throws IOException, NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException,
+             InvalidKeyException, IllegalBlockSizeException, BadPaddingException {
+
+    final String RSA = "RSA";
+    final String ASCII = "US-ASCII";
+
+    // Read private key from file
+    FileInputStream fstream = new FileInputStream(_privateKey);
+    byte[] sshPrivateKey = IOUtils.toByteArray(fstream);
+    PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(sshPrivateKey);
+    KeyFactory kf = KeyFactory.getInstance(RSA);
+    PrivateKey privateKey = kf.generatePrivate(keySpec);
+
+    // Init RSA decrypter with the private key (DECRYPT_MODE rather than the magic number 2)
+    Cipher decryptCipher = Cipher.getInstance(RSA);
+    decryptCipher.init(Cipher.DECRYPT_MODE, privateKey);
+
+    // Convert base 64 password string to raw bytes
+    byte[] rawBytes = org.apache.commons.codec.binary.Base64.decodeBase64(encodedPassword.getBytes(ASCII));
+
+    // Decrypt the encoded raw bytes using decrypter
+    byte[] decodedBytes = decryptCipher.doFinal(rawBytes);
+
+    // Return decoded bytes as string
+    return new String(decodedBytes, ASCII);
+  }
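The decode path above is standard RSA decryption over a Base64-encoded payload. A self-contained round trip that uses an in-memory key pair instead of a PKCS#8 key file (illustrative only; the plaintext is made up):

import java.security.KeyPair;
import java.security.KeyPairGenerator;
import javax.crypto.Cipher;
import org.apache.commons.codec.binary.Base64;

public class RsaRoundTripDemo {
  public static void main(String[] args) throws Exception {
    // In-memory key pair stands in for the PKCS#8 private key file used above.
    KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
    generator.initialize(2048);
    KeyPair keyPair = generator.generateKeyPair();

    // Encrypt with the public key, the exact inverse of the decode path.
    Cipher encrypt = Cipher.getInstance("RSA");
    encrypt.init(Cipher.ENCRYPT_MODE, keyPair.getPublic());
    String encoded = Base64.encodeBase64String(encrypt.doFinal("secret".getBytes("US-ASCII")));

    // Decrypt with the private key, mirroring decodeHeadlessChallenge.
    Cipher decrypt = Cipher.getInstance("RSA");
    decrypt.init(Cipher.DECRYPT_MODE, keyPair.getPrivate());
    byte[] decoded = decrypt.doFinal(Base64.decodeBase64(encoded.getBytes("US-ASCII")));
    System.out.println(new String(decoded, "US-ASCII")); // prints "secret"
  }
}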
+  /**
+   * Returns the jobs from the flow
+   * @return The jobs from the flow, as a map from job id to job status
+   */
+  public Map<String, String> getJobsFromFlow() {
+    List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
+    urlParameters.add(new BasicNameValuePair("session.id", _sessionId));
+    urlParameters.add(new BasicNameValuePair("ajax", "fetchexecflow"));
+    urlParameters.add(new BasicNameValuePair("execid", _executionId));
+
+    try {
+      JSONObject jsonObject = fetchJson(urlParameters, _workflowExecutionUrl);
+      JSONArray jobs = jsonObject.getJSONArray("nodes");
+      Map<String, String> jobMap = new HashMap<String, String>();
+      for (int i = 0; i < jobs.length(); i++) {
+        JSONObject job = jobs.getJSONObject(i);
+        jobMap.put(job.get("id").toString(), job.get("status").toString());
+      }
+      return jobMap;
+    } catch (JSONException e) {
+      e.printStackTrace();
+    }
+    return null;
+  }
+
+  /**
+   * Returns the Azkaban flow log
+   * @param offset The offset from which logs should be fetched
+   * @param maximumLogLengthLimit The maximum log length limit
+   * @return The Azkaban flow logs
+   */
+  public String getAzkabanFlowLog(String offset, String maximumLogLengthLimit) {
+    List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
+    urlParameters.add(new BasicNameValuePair("session.id", _sessionId));
+    urlParameters.add(new BasicNameValuePair("ajax", "fetchExecFlowLogs"));
+    urlParameters.add(new BasicNameValuePair("execid", _executionId));
+    urlParameters.add(new BasicNameValuePair("offset", offset));
+    urlParameters.add(new BasicNameValuePair("length", maximumLogLengthLimit));
+
+    try {
+      JSONObject jsonObject = fetchJson(urlParameters, _workflowExecutionUrl);
+      if (jsonObject.getLong("length") == 0) {
+        throw new RuntimeException("No log found for the given execution url!");
+      }
+      return jsonObject.get("data").toString();
+    } catch (JSONException e) {
+      e.printStackTrace();
+    }
+    return null;
+  }
+
+  @Override
+  public void analyzeJob(String jobId) {
+    String rawAzkabanJobLog = getAzkabanJobLog(jobId, AZKABAN_LOG_OFFSET, AZKABAN_LOG_LENGTH_LIMIT);
+    AzkabanJobLogAnalyzer analyzedLog = new AzkabanJobLogAnalyzer(rawAzkabanJobLog);
+    jobIdToLog.put(jobId, analyzedLog);
+  }
+
+  @Override
+  public Set<String> getYarnApplicationsFromJob(String jobId) {
+    if (!jobIdToLog.containsKey(jobId)) {
+      throw new RuntimeException("No job with id " + jobId + " found");
+    }
+    return jobIdToLog.get(jobId).getSubEvents();
+  }
+
+  @Override
+  public JobState getJobState(String jobId) {
+    if (!jobIdToLog.containsKey(jobId)) {
+      throw new RuntimeException("No job with id " + jobId + " found");
+    }
+    return jobIdToLog.get(jobId).getState();
+  }
+
+  @Override
+  public LoggingEvent getJobException(String jobId) {
+    if (!jobIdToLog.containsKey(jobId)) {
+      throw new RuntimeException("No job with id " + jobId + " found");
+    }
+    return jobIdToLog.get(jobId).getException();
+  }
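Putting the pieces together, a caller could triage a whole execution by walking getJobsFromFlow() and analyzing only the failed jobs; a hypothetical sketch that assumes an already logged-in client:

import java.util.Map;

final class FlowTriageDemo {
  // Sketch only: pull the exception for every job Azkaban reports as FAILED.
  static void printFailures(AzkabanWorkflowClient client) {
    Map<String, String> jobs = client.getJobsFromFlow();
    for (Map.Entry<String, String> entry : jobs.entrySet()) {
      if ("FAILED".equals(entry.getValue())) {
        client.analyzeJob(entry.getKey());
        System.out.println(entry.getKey() + ": " + client.getJobException(entry.getKey()));
      }
    }
  }
}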
+  /**
+   * Returns the Azkaban job log for the given Azkaban job id.
+   *
+   * @param jobId Azkaban job id
+   * @param offset Offset of log from the start
+   * @param length Maximum limit on length of log
+   * @return Azkaban job log in the form of string
+   */
+  public String getAzkabanJobLog(String jobId, String offset, String length) {
+    List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
+    urlParameters.add(new BasicNameValuePair("session.id", _sessionId));
+    urlParameters.add(new BasicNameValuePair("ajax", "fetchExecJobLogs"));
+    urlParameters.add(new BasicNameValuePair("execid", _executionId));
+    urlParameters.add(new BasicNameValuePair("jobId", jobId));
+    urlParameters.add(new BasicNameValuePair("offset", offset));
+    urlParameters.add(new BasicNameValuePair("length", length));
+    try {
+      JSONObject jsonObject = fetchJson(urlParameters, _workflowExecutionUrl);
+      if (jsonObject.getLong("length") == 0) { // TODO: if length == 0, throw an exception
+        logger.info("No log found for Azkaban job " + jobId);
+      }
+      return jsonObject.get("data").toString();
+    } catch (JSONException e) {
+      e.printStackTrace();
+    }
+    return null;
+  }
+}
diff --git a/app/com/linkedin/drelephant/hadoop/HadoopCounterHolder.java b/app/com/linkedin/drelephant/hadoop/HadoopCounterHolder.java
deleted file mode 100644
index e0b698bf0..000000000
--- a/app/com/linkedin/drelephant/hadoop/HadoopCounterHolder.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package com.linkedin.drelephant.hadoop;
-
-import java.util.Map;
-
-public class HadoopCounterHolder {
-
-  private Map counters;
-
-  public HadoopCounterHolder(Map counterMap) {
-    counters = counterMap;
-  }
-
-  public long get(CounterName counterName) {
-    Long value = counters.get(counterName);
-    if (value == null) {
-      return 0;
-    }
-    return value;
-  }
-
-  public void set(CounterName counterName, long value) {
-    counters.put(counterName, value);
-  }
-
-  public static enum GroupName {
-    FileInput("org.apache.hadoop.mapred.FileInputFormat$Counter"),
-    FileSystemCounters("FileSystemCounters"),
-    MapReduce("org.apache.hadoop.mapred.Task$Counter"),
-    FileOutput("org.apache.hadoop.mapred.FileOutputFormat$Counter");
-
-    String name;
-
-    GroupName(String name) {
-      this.name = name;
-    }
-
-    public String getName() {
-      return name;
-    }
-  }
-
-  public static enum CounterName {
-    BYTES_READ(GroupName.FileInput, "BYTES_READ"),
-    BYTES_WRITTEN(GroupName.FileOutput, "BYTES_WRITTEN"),
-
-    FILE_BYTES_READ(GroupName.FileSystemCounters, "FILE_BYTES_READ"),
-    FILE_BYTES_WRITTEN(GroupName.FileSystemCounters, "FILE_BYTES_WRITTEN"),
-    HDFS_BYTES_READ(GroupName.FileSystemCounters, "HDFS_BYTES_READ"),
-    HDFS_BYTES_WRITTEN(GroupName.FileSystemCounters, "HDFS_BYTES_WRITTEN"),
-
-
-    MAP_INPUT_RECORDS(GroupName.MapReduce, "MAP_INPUT_RECORDS"),
-    MAP_OUTPUT_RECORDS(GroupName.MapReduce, "MAP_OUTPUT_RECORDS"),
-    MAP_OUTPUT_BYTES(GroupName.MapReduce, "MAP_OUTPUT_BYTES"),
-    MAP_OUTPUT_MATERIALIZED_BYTES(GroupName.MapReduce, "MAP_OUTPUT_MATERIALIZED_BYTES"),
-    SPLIT_RAW_BYTES(GroupName.MapReduce, "SPLIT_RAW_BYTES"),
-
-    REDUCE_INPUT_GROUPS(GroupName.MapReduce, "REDUCE_INPUT_GROUPS"),
-    REDUCE_SHUFFLE_BYTES(GroupName.MapReduce, "REDUCE_SHUFFLE_BYTES"),
-    REDUCE_OUTPUT_RECORDS(GroupName.MapReduce, "REDUCE_OUTPUT_RECORDS"),
-    REDUCE_INPUT_RECORDS(GroupName.MapReduce, "REDUCE_INPUT_RECORDS"),
-
-    COMBINE_INPUT_RECORDS(GroupName.MapReduce, "COMBINE_INPUT_RECORDS"),
-    COMBINE_OUTPUT_RECORDS(GroupName.MapReduce, "COMBINE_OUTPUT_RECORDS"),
-    SPILLED_RECORDS(GroupName.MapReduce, "SPILLED_RECORDS"),
-
-    CPU_MILLISECONDS(GroupName.MapReduce, "CPU_MILLISECONDS"),
-    COMMITTED_HEAP_BYTES(GroupName.MapReduce,
"COMMITTED_HEAP_BYTES"), - PHYSICAL_MEMORY_BYTES(GroupName.MapReduce, "PHYSICAL_MEMORY_BYTES"), - VIRTUAL_MEMORY_BYTES(GroupName.MapReduce, "VIRTUAL_MEMORY_BYTES"); - - GroupName group; - String name; - - CounterName(GroupName group, String name) { - this.group = group; - this.name = name; - } - - public GroupName getGroup() { - return group; - } - - public String getName() { - return name; - } - } -} - - diff --git a/app/com/linkedin/drelephant/hadoop/HadoopJobData.java b/app/com/linkedin/drelephant/hadoop/HadoopJobData.java deleted file mode 100644 index b7a70c5d6..000000000 --- a/app/com/linkedin/drelephant/hadoop/HadoopJobData.java +++ /dev/null @@ -1,97 +0,0 @@ -package com.linkedin.drelephant.hadoop; - -import java.util.Properties; - - -public class HadoopJobData { - private String jobId=""; - private String username = ""; - private String url = ""; - private String jobName = ""; - private long startTime = 0; - private HadoopCounterHolder counterHolder; - private HadoopTaskData[] mapperData; - private HadoopTaskData[] reducerData; - private Properties jobConf; - - public HadoopJobData setJobId(String jobId) { - this.jobId = jobId; - return this; - } - - public HadoopJobData setJobName(String jobName) { - this.jobName = jobName; - return this; - } - - public HadoopJobData setUsername(String username) { - this.username = username; - return this; - } - - public HadoopJobData setStartTime(long startTime) { - this.startTime = startTime; - return this; - } - - public HadoopJobData setUrl(String url) { - this.url = url; - return this; - } - - public HadoopJobData setCounters(HadoopCounterHolder counterHolder) { - this.counterHolder = counterHolder; - return this; - } - - public HadoopJobData setMapperData(HadoopTaskData[] mappers) { - this.mapperData = mappers; - return this; - } - - public HadoopJobData setReducerData(HadoopTaskData[] reducers) { - this.reducerData = reducers; - return this; - } - - public HadoopJobData setJobConf(Properties jobConf) { - this.jobConf = jobConf; - return this; - } - - public HadoopCounterHolder getCounters() { - return counterHolder; - } - - public HadoopTaskData[] getMapperData() { - return mapperData; - } - - public HadoopTaskData[] getReducerData() { - return reducerData; - } - - public Properties getJobConf() { - return jobConf; - } - - public String getUsername() { - return username; - } - - public long getStartTime() { - return startTime; - } - - public String getUrl() { - return url; - } - - public String getJobName() { - return jobName; - } - - public String getJobId() { - return jobId; - } -} diff --git a/app/com/linkedin/drelephant/hadoop/HadoopSecurity.java b/app/com/linkedin/drelephant/hadoop/HadoopSecurity.java deleted file mode 100644 index aeb110b45..000000000 --- a/app/com/linkedin/drelephant/hadoop/HadoopSecurity.java +++ /dev/null @@ -1,58 +0,0 @@ -package com.linkedin.drelephant.hadoop; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.log4j.Logger; -import play.Play; - -import java.io.IOException; -import java.security.PrivilegedAction; - - -public class HadoopSecurity { - private static final Logger logger = Logger.getLogger(HadoopSecurity.class); - - private UserGroupInformation loginUser = null; - - private String keytabLocation; - private String keytabUser; - private boolean securityEnabled = false; - - public HadoopSecurity() throws IOException { - Configuration conf = new Configuration(); - UserGroupInformation.setConfiguration(conf); - securityEnabled = 
UserGroupInformation.isSecurityEnabled(); - if (securityEnabled) { - keytabLocation = Play.application().configuration().getString("keytab.location"); - keytabUser = Play.application().configuration().getString("keytab.user"); - checkLogin(); - } - } - - public UserGroupInformation getUGI() throws IOException { - checkLogin(); - return loginUser; - } - - public void checkLogin() throws IOException { - - if (loginUser == null) { - logger.info("No login user. Creating login user"); - logger.info("Logging with " + keytabUser + " and " + keytabLocation); - UserGroupInformation.loginUserFromKeytab(keytabUser, keytabLocation); - loginUser = UserGroupInformation.getLoginUser(); - logger.info("Logged in with user " + loginUser); - } else { - loginUser.checkTGTAndReloginFromKeytab(); - } - - } - - public T doAs(PrivilegedAction action) throws IOException { - UserGroupInformation ugi = getUGI(); - if (ugi != null) { - return ugi.doAs(action); - } - return null; - } -} diff --git a/app/com/linkedin/drelephant/hadoop/HadoopTaskData.java b/app/com/linkedin/drelephant/hadoop/HadoopTaskData.java deleted file mode 100644 index 7c599ac36..000000000 --- a/app/com/linkedin/drelephant/hadoop/HadoopTaskData.java +++ /dev/null @@ -1,55 +0,0 @@ -package com.linkedin.drelephant.hadoop; - -public class HadoopTaskData{ - private HadoopCounterHolder counterHolder; - private long startTime = 0; - private long endTime = 0; - private long shuffleTime = 0; - private long sortTime = 0; - private boolean timed = false; - - public HadoopTaskData(HadoopCounterHolder counterHolder, long[] time) { - this.counterHolder = counterHolder; - this.startTime = time[0]; - this.endTime = time[1]; - this.shuffleTime = time[2]; - this.sortTime = time[3]; - this.timed = true; - } - - public HadoopTaskData(HadoopCounterHolder counterHolder) { - this.counterHolder = counterHolder; - } - - public HadoopCounterHolder getCounters() { - return counterHolder; - } - - public long getStartTime() { - return startTime; - } - - public long getEndTime() { - return endTime; - } - - public long getRunTime() { - return endTime - startTime; - } - - public long getExecutionTime() { - return endTime - startTime - shuffleTime - sortTime; - } - - public long getShuffleTime() { - return shuffleTime; - } - - public long getSortTime() { - return sortTime; - } - - public boolean timed() { - return timed; - } -} diff --git a/app/com/linkedin/drelephant/mapreduce/MapReduceMetricsAggregator.java b/app/com/linkedin/drelephant/mapreduce/MapReduceMetricsAggregator.java new file mode 100644 index 000000000..8d2004c0e --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/MapReduceMetricsAggregator.java @@ -0,0 +1,101 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.mapreduce; + +import com.linkedin.drelephant.analysis.HadoopApplicationData; +import com.linkedin.drelephant.analysis.HadoopMetricsAggregator; +import com.linkedin.drelephant.analysis.HadoopAggregatedData; +import com.linkedin.drelephant.configurations.aggregator.AggregatorConfigurationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import org.apache.commons.io.FileUtils; +import org.apache.log4j.Logger; + + +public class MapReduceMetricsAggregator implements HadoopMetricsAggregator { + + private static final Logger logger = Logger.getLogger(MapReduceMetricsAggregator.class); + private static final String MAP_CONTAINER_CONFIG = "mapreduce.map.memory.mb"; + private static final String REDUCER_CONTAINER_CONFIG = "mapreduce.reduce.memory.mb"; + private static final String REDUCER_SLOW_START_CONFIG = "mapreduce.job.reduce.slowstart.completedmaps"; + private static final long CONTAINER_MEMORY_DEFAULT_MBYTES = 2048L; + + private HadoopAggregatedData _hadoopAggregatedData = null; + private TaskLevelAggregatedMetrics mapTasks; + private TaskLevelAggregatedMetrics reduceTasks; + + private AggregatorConfigurationData _aggregatorConfigurationData; + + public MapReduceMetricsAggregator(AggregatorConfigurationData _aggregatorConfigurationData) { + this._aggregatorConfigurationData = _aggregatorConfigurationData; + _hadoopAggregatedData = new HadoopAggregatedData(); + } + + @Override + public void aggregate(HadoopApplicationData hadoopData) { + + MapReduceApplicationData data = (MapReduceApplicationData) hadoopData; + + long mapTaskContainerSize = getMapContainerSize(data); + long reduceTaskContainerSize = getReducerContainerSize(data); + + int reduceTaskSlowStartPercentage = + (int) (Double.parseDouble(data.getConf().getProperty(REDUCER_SLOW_START_CONFIG)) * 100); + + + //overwrite reduceTaskSlowStartPercentage to 100%. TODO: make use of the slow start percent + reduceTaskSlowStartPercentage = 100; + + mapTasks = new TaskLevelAggregatedMetrics(data.getMapperData(), mapTaskContainerSize, data.getStartTime()); + + long reduceIdealStartTime = mapTasks.getNthPercentileFinishTime(reduceTaskSlowStartPercentage); + + // Mappers list is empty + if(reduceIdealStartTime == -1) { + // ideal start time for reducer is infinite since it cannot start + reduceIdealStartTime = Long.MAX_VALUE; + } + + reduceTasks = new TaskLevelAggregatedMetrics(data.getReducerData(), reduceTaskContainerSize, reduceIdealStartTime); + + _hadoopAggregatedData.setResourceUsed(mapTasks.getResourceUsed() + reduceTasks.getResourceUsed()); + _hadoopAggregatedData.setTotalDelay(mapTasks.getDelay() + reduceTasks.getDelay()); + _hadoopAggregatedData.setResourceWasted(mapTasks.getResourceWasted() + reduceTasks.getResourceWasted()); + } + + @Override + public HadoopAggregatedData getResult() { + return _hadoopAggregatedData; + } + + private long getMapContainerSize(HadoopApplicationData data) { + try { + long value = Long.parseLong(data.getConf().getProperty(MAP_CONTAINER_CONFIG)); + return (value < 0) ? CONTAINER_MEMORY_DEFAULT_MBYTES : value; + } catch ( NumberFormatException ex) { + return CONTAINER_MEMORY_DEFAULT_MBYTES; + } + } + + private long getReducerContainerSize(HadoopApplicationData data) { + try { + long value = Long.parseLong(data.getConf().getProperty(REDUCER_CONTAINER_CONFIG)); + return (value < 0) ? 
CONTAINER_MEMORY_DEFAULT_MBYTES : value;
+    } catch (NumberFormatException ex) {
+      return CONTAINER_MEMORY_DEFAULT_MBYTES;
+    }
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/TaskLevelAggregatedMetrics.java b/app/com/linkedin/drelephant/mapreduce/TaskLevelAggregatedMetrics.java
new file mode 100644
index 000000000..33b90b7e0
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/TaskLevelAggregatedMetrics.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce;
+
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+
+
+public class TaskLevelAggregatedMetrics {
+
+  private static final Logger logger = Logger.getLogger(TaskLevelAggregatedMetrics.class);
+
+  private long _delay = 0;
+  private long _resourceWasted = 0;
+  private long _resourceUsed = 0;
+
+  private List<Long> finishTimes = new ArrayList<Long>();
+  private List<Long> durations = new ArrayList<Long>();
+
+  private static final double MEMORY_BUFFER = 1.5;
+  private static final double CLUSTER_MEMORY_FACTOR = 2.1;
+
+  /**
+   * Returns the nth percentile finish time of the tasks
+   * @param percentile The percentile of the finish times to return
+   * @return The nth percentile finish time, or -1 if there are no tasks
+   */
+  public long getNthPercentileFinishTime(int percentile) {
+    if (finishTimes == null || finishTimes.size() == 0) {
+      return -1;
+    }
+    return Statistics.percentile(finishTimes, percentile);
+  }
+
+  /**
+   * Constructor for TaskLevelAggregatedMetrics
+   * @param taskData Array containing the task data for mappers and/or reducers
+   * @param containerSize The container size of the tasks
+   * @param idealStartTime The ideal start time for the tasks. For mappers it is the submit time; for
+   *                       reducers, it is the time when the number of completed maps becomes greater
+   *                       than the slow start threshold.
+   */
+  public TaskLevelAggregatedMetrics(MapReduceTaskData[] taskData, long containerSize, long idealStartTime) {
+    compute(taskData, containerSize, idealStartTime);
+  }
+
+  /**
+   * Returns the overall delay for the tasks.
+   * @return The delay of the tasks.
+   */
+  public long getDelay() {
+    return _delay;
+  }
+
+  /**
+   * Returns the resources wasted by all the tasks in MB Seconds
+   * @return The wasted resources of all the tasks in MB Seconds
+   */
+  public long getResourceWasted() {
+    return _resourceWasted;
+  }
+
+  /**
+   * Returns the resources used by all the tasks in MB Seconds
+   * @return The total resources used by all tasks in MB Seconds
+   */
+  public long getResourceUsed() {
+    return _resourceUsed;
+  }
+
+  /**
+   * Computes the aggregated metrics: peak memory, delay, total task duration, wasted resources and memory usage.
+ * @param taskDatas + * @param containerSize + * @param idealStartTime + */ + private void compute(MapReduceTaskData[] taskDatas, long containerSize, long idealStartTime) { + + long peakMemoryNeed = 0; + long taskFinishTimeMax = 0; + long taskDurationMax = 0; + + // if there are zero tasks, then nothing to compute. + if(taskDatas == null || taskDatas.length == 0) { + return; + } + + for (MapReduceTaskData taskData: taskDatas) { + long taskMemory = taskData.getCounters().get(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES)/ FileUtils.ONE_MB; // MB + long taskVM = taskData.getCounters().get(MapReduceCounterData.CounterName.VIRTUAL_MEMORY_BYTES)/ FileUtils.ONE_MB; // MB + long taskDuration = taskData.getFinishTimeMs() - taskData.getStartTimeMs(); // Milliseconds + long taskCost = (containerSize) * (taskDuration / Statistics.SECOND_IN_MS); // MB Seconds + + durations.add(taskDuration); + finishTimes.add(taskData.getFinishTimeMs()); + + //peak Memory usage + long memoryRequiredForVM = (long) (taskVM/CLUSTER_MEMORY_FACTOR); + long biggerMemoryRequirement = memoryRequiredForVM > taskMemory ? memoryRequiredForVM : taskMemory; + peakMemoryNeed = biggerMemoryRequirement > peakMemoryNeed ? biggerMemoryRequirement : peakMemoryNeed; + + if(taskFinishTimeMax < taskData.getFinishTimeMs()) { + taskFinishTimeMax = taskData.getFinishTimeMs(); + } + + if(taskDurationMax < taskDuration) { + taskDurationMax = taskDuration; + } + _resourceUsed += taskCost; + } + + // Compute the delay in starting the task. + _delay = taskFinishTimeMax - (idealStartTime + taskDurationMax); + + // invalid delay + if(_delay < 0) { + _delay = 0; + } + + // wastedResources + long wastedMemory = containerSize - (long) (peakMemoryNeed * MEMORY_BUFFER); // give a 50% buffer + if(wastedMemory > 0) { + for (long duration : durations) { + _resourceWasted += (wastedMemory) * (duration / Statistics.SECOND_IN_MS); // MB Seconds + } + } + } + +} diff --git a/app/com/linkedin/drelephant/mapreduce/data/MapReduceApplicationData.java b/app/com/linkedin/drelephant/mapreduce/data/MapReduceApplicationData.java new file mode 100644 index 000000000..c30c46f64 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/data/MapReduceApplicationData.java @@ -0,0 +1,198 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.mapreduce.data; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.HadoopApplicationData; +import java.util.Properties; + + +/** + * This class contains the MapReduce Application Information + */ +public class MapReduceApplicationData implements HadoopApplicationData { + private static final ApplicationType APPLICATION_TYPE = new ApplicationType("MAPREDUCE"); + + private boolean _succeeded = true; + private String _diagnosticInfo = ""; + private String _appId = ""; + private String _jobId = ""; + private String _username = ""; + private String _url = ""; + private String _jobName = ""; + private long _submitTime = 0; + private long _startTime = 0; + private long _finishTime = 0; + + private MapReduceCounterData _counterHolder; + private MapReduceTaskData[] _mapperData; + private MapReduceTaskData[] _reducerData; + private Properties _jobConf; + private boolean _isRetry = false; + + public MapReduceApplicationData setSucceeded(boolean succeeded) { + this._succeeded = succeeded; + return this; + } + + public MapReduceApplicationData setDiagnosticInfo(String diagnosticInfo) { + this._diagnosticInfo = diagnosticInfo; + return this; + } + + public MapReduceApplicationData setRetry(boolean isRetry) { + this._isRetry = isRetry; + return this; + } + + public MapReduceApplicationData setAppId(String appId) { + this._appId = appId; + return this; + } + + public MapReduceApplicationData setJobId(String jobId) { + this._jobId = jobId; + return this; + } + + public MapReduceApplicationData setJobName(String jobName) { + this._jobName = jobName; + return this; + } + + public MapReduceApplicationData setUsername(String username) { + this._username = username; + return this; + } + + public MapReduceApplicationData setSubmitTime(long submitTime) { + this._submitTime = submitTime; + return this; + } + + public MapReduceApplicationData setStartTime(long startTime) { + this._startTime = startTime; + return this; + } + + public MapReduceApplicationData setFinishTime(long finishTime) { + this._finishTime = finishTime; + return this; + } + + public MapReduceApplicationData setUrl(String url) { + this._url = url; + return this; + } + + public MapReduceApplicationData setCounters(MapReduceCounterData counterHolder) { + this._counterHolder = counterHolder; + return this; + } + + public MapReduceApplicationData setMapperData(MapReduceTaskData[] mappers) { + this._mapperData = mappers; + return this; + } + + public MapReduceApplicationData setReducerData(MapReduceTaskData[] reducers) { + this._reducerData = reducers; + return this; + } + + public MapReduceApplicationData setJobConf(Properties jobConf) { + this._jobConf = jobConf; + return this; + } + + public MapReduceCounterData getCounters() { + return _counterHolder; + } + + public MapReduceTaskData[] getMapperData() { + return _mapperData; + } + + public MapReduceTaskData[] getReducerData() { + return _reducerData; + } + + @Override + public String getAppId() { + return _appId; + } + + @Override + public Properties getConf() { + return _jobConf; + } + + @Override + public ApplicationType getApplicationType() { + return APPLICATION_TYPE; + } + + @Override + public boolean isEmpty() { + return _succeeded && getMapperData().length == 0 && getReducerData().length == 0; + } + + public String getUsername() { + return _username; + } + + public long getSubmitTime() { + return _submitTime; + } + + public long getStartTime() { + return _startTime; + } + + public long getFinishTime() { 
+    return _finishTime;
+  }
+
+  public String getUrl() {
+    return _url;
+  }
+
+  public String getJobName() {
+    return _jobName;
+  }
+
+  public boolean isRetryJob() {
+    return _isRetry;
+  }
+
+  public String getJobId() {
+    return _jobId;
+  }
+
+  public boolean getSucceeded() {
+    return _succeeded;
+  }
+
+  public String getDiagnosticInfo() {
+    return _diagnosticInfo;
+  }
+
+  @Override
+  public String toString() {
+    return "id: " + getJobId() + ", name: " + getJobName();
+  }
+}
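Because every setter above returns this, callers can populate an application record fluently; a small sketch with made-up values (the ids reuse the example id format from the fetcher javadoc later in this patch):

import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;

public class AppDataDemo {
  public static void main(String[] args) {
    // All values are hypothetical.
    MapReduceApplicationData appData = new MapReduceApplicationData()
        .setAppId("application_1461566847127_84624")
        .setJobId("job_1461566847127_84624")
        .setUsername("someuser")
        .setSubmitTime(1461867600000L)
        .setStartTime(1461867601000L)
        .setFinishTime(1461867700000L)
        .setSucceeded(true);
    System.out.println(appData); // "id: job_1461566847127_84624, name: "
  }
}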
diff --git a/app/com/linkedin/drelephant/mapreduce/data/MapReduceCounterData.java b/app/com/linkedin/drelephant/mapreduce/data/MapReduceCounterData.java
new file mode 100644
index 000000000..36d9f11d0
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/data/MapReduceCounterData.java
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.data;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+
+/**
+ * This class manages all the MapReduce Counters
+ */
+public class MapReduceCounterData {
+
+  // This is a map of group to all the counters in the group and their values.
+  private final Map<String, Map<String, Long>> _pubCounters;
+
+  public String toString() {
+    return _pubCounters.toString();
+  }
+
+  public MapReduceCounterData() {
+    _pubCounters = new HashMap<String, Map<String, Long>>(8);
+  }
+
+  /**
+   * @return the value of the counter, 0 if not present.
+   * This method is only used for job heuristics.
+   * Due to h1 & h2 counter group incompatibility, we iterate every counter group (4 by default)
+   * to find a matching counter name; otherwise we would have to hardcode the h1 and h2 versions
+   * of the counter group and try twice with two names for each counter in this method.
+   * This approach is less efficient, but cleaner.
+   */
+  public long get(CounterName counterName) {
+    // For each counter group, try to match the counter name
+    for (Map<String, Long> counterGrp : _pubCounters.values()) {
+      if (counterGrp.containsKey(counterName._name)) {
+        return counterGrp.get(counterName._name);
+      }
+    }
+    return 0;
+  }
+
+  public void set(CounterName counterName, long value) {
+    set(counterName.getGroupName(), counterName.getName(), value);
+  }
+
+  /**
+   * Set the value of a counter that we may want to publish later
+   *
+   * @param groupName
+   * @param counterName
+   * @param value
+   */
+  public void set(String groupName, String counterName, long value) {
+    Map<String, Long> counterMap = _pubCounters.get(groupName);
+    if (counterMap == null) {
+      counterMap = new HashMap<String, Long>(4);
+      _pubCounters.put(groupName, counterMap);
+    }
+    counterMap.put(counterName, value);
+  }
+
+  public Set<String> getGroupNames() {
+    Set<String> groupNames = _pubCounters.keySet();
+    return Collections.unmodifiableSet(groupNames);
+  }
+
+  /**
+   * Get the values of all counters in a group
+   * @param groupName
+   * @return A map containing all the values of counters in the group.
+   */
+  public Map<String, Long> getAllCountersInGroup(String groupName) {
+    Map<String, Long> counterMap = _pubCounters.get(groupName);
+    if (counterMap == null) {
+      counterMap = new HashMap<String, Long>(1);
+    }
+    return counterMap;
+  }
+
+  public static enum GroupName {
+    FileInput,
+    FileSystemCounters,
+    MapReduce,
+    FileOutput;
+  }
+
+  public static enum CounterName {
+    BYTES_READ(GroupName.FileInput, "BYTES_READ", "Bytes Read"),
+    BYTES_WRITTEN(GroupName.FileOutput, "BYTES_WRITTEN", "Bytes Written"),
+
+    FILE_BYTES_READ(GroupName.FileSystemCounters, "FILE_BYTES_READ", "FILE_BYTES_READ"),
+    FILE_BYTES_WRITTEN(GroupName.FileSystemCounters, "FILE_BYTES_WRITTEN", "FILE_BYTES_WRITTEN"),
+    HDFS_BYTES_READ(GroupName.FileSystemCounters, "HDFS_BYTES_READ", "HDFS_BYTES_READ"),
+    HDFS_BYTES_WRITTEN(GroupName.FileSystemCounters, "HDFS_BYTES_WRITTEN", "HDFS_BYTES_WRITTEN"),
+
+    MAP_INPUT_RECORDS(GroupName.MapReduce, "MAP_INPUT_RECORDS", "Map input records"),
+    MAP_OUTPUT_RECORDS(GroupName.MapReduce, "MAP_OUTPUT_RECORDS", "Map output records"),
+    MAP_OUTPUT_BYTES(GroupName.MapReduce, "MAP_OUTPUT_BYTES", "Map output bytes"),
+    MAP_OUTPUT_MATERIALIZED_BYTES(GroupName.MapReduce, "MAP_OUTPUT_MATERIALIZED_BYTES", "Map output materialized bytes"),
+    SPLIT_RAW_BYTES(GroupName.MapReduce, "SPLIT_RAW_BYTES", "SPLIT_RAW_BYTES"),
+
+    REDUCE_INPUT_GROUPS(GroupName.MapReduce, "REDUCE_INPUT_GROUPS", "Reduce input groups"),
+    REDUCE_SHUFFLE_BYTES(GroupName.MapReduce, "REDUCE_SHUFFLE_BYTES", "Reduce shuffle bytes"),
+    REDUCE_OUTPUT_RECORDS(GroupName.MapReduce, "REDUCE_OUTPUT_RECORDS", "Reduce output records"),
+    REDUCE_INPUT_RECORDS(GroupName.MapReduce, "REDUCE_INPUT_RECORDS", "Reduce input records"),
+
+    COMBINE_INPUT_RECORDS(GroupName.MapReduce, "COMBINE_INPUT_RECORDS", "Combine input records"),
+    COMBINE_OUTPUT_RECORDS(GroupName.MapReduce, "COMBINE_OUTPUT_RECORDS", "Combine output records"),
+    SPILLED_RECORDS(GroupName.MapReduce, "SPILLED_RECORDS", "Spilled Records"),
+
+    CPU_MILLISECONDS(GroupName.MapReduce, "CPU_MILLISECONDS", "CPU time spent (ms)"),
+    GC_MILLISECONDS(GroupName.MapReduce, "GC_TIME_MILLIS", "GC time elapsed (ms)"),
+    COMMITTED_HEAP_BYTES(GroupName.MapReduce, "COMMITTED_HEAP_BYTES", "Total committed heap usage (bytes)"),
+    PHYSICAL_MEMORY_BYTES(GroupName.MapReduce, "PHYSICAL_MEMORY_BYTES", "Physical memory (bytes) snapshot"),
+    VIRTUAL_MEMORY_BYTES(GroupName.MapReduce, "VIRTUAL_MEMORY_BYTES", "Virtual memory (bytes) snapshot");
+
+    GroupName _group;
+    String _name;
+    String _displayName;
+
+    CounterName(GroupName group, String name, String displayName) {
+      this._group = group;
+      this._name = name;
+      this._displayName = displayName;
+    }
+
+    static Map<String, CounterName> _counterDisplayNameMap;
+    static Map<String, CounterName> _counterNameMap;
+    static {
+      _counterDisplayNameMap = new HashMap<String, CounterName>();
+      _counterNameMap = new HashMap<String, CounterName>();
+      for (CounterName cn : CounterName.values()) {
+        _counterDisplayNameMap.put(cn._displayName, cn);
+        _counterNameMap.put(cn._name, cn);
+      }
+    }
+
+    public static CounterName getCounterFromName(String name) {
+      if (_counterNameMap.containsKey(name)) {
+        return _counterNameMap.get(name);
+      }
+      return null;
+    }
+
+    public static CounterName getCounterFromDisplayName(String displayName) {
+      if (_counterDisplayNameMap.containsKey(displayName)) {
+        return _counterDisplayNameMap.get(displayName);
+      }
+      return null;
+    }
+
+    public String getName() {
+      return _name;
+    }
+
+    public String getDisplayName() {
+      return _displayName;
+    }
+
+    public String getGroupName() {
+      return _group.name();
+    }
+  }
+}
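A brief, hypothetical example of the counter API above: set(CounterName, long) stores under the enum's group, set(String, String, long) stores an arbitrary group/counter pair, and get(...) returns 0 for counters that were never set:

import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;

public class CounterDemo {
  public static void main(String[] args) {
    MapReduceCounterData counters = new MapReduceCounterData();
    // Group and counter names mirror the enum definitions above; values are made up.
    counters.set(MapReduceCounterData.CounterName.CPU_MILLISECONDS, 12000L);
    counters.set("FileSystemCounters", "HDFS_BYTES_READ", 1024L);
    System.out.println(counters.get(MapReduceCounterData.CounterName.CPU_MILLISECONDS)); // 12000
    System.out.println(counters.get(MapReduceCounterData.CounterName.SPILLED_RECORDS));  // 0 when absent
  }
}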
diff --git a/app/com/linkedin/drelephant/mapreduce/data/MapReduceTaskData.java b/app/com/linkedin/drelephant/mapreduce/data/MapReduceTaskData.java
new file mode 100644
index 000000000..5be043009
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/data/MapReduceTaskData.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.data;
+
+
+/**
+ * This class manages the data of a MapReduce task
+ */
+public class MapReduceTaskData {
+
+  private MapReduceCounterData _counterHolder;
+  private String _taskId;
+  // The successful attempt id
+  private String _attemptId;
+  private long _totalTimeMs = 0;
+  private long _shuffleTimeMs = 0;
+  private long _sortTimeMs = 0;
+  private long _startTimeMs = 0;
+  private long _finishTimeMs = 0;
+  // This flag will only be true when the time and counter values have been set successfully.
+  private boolean _isTimeAndCounterDataPresent = false;
+
+  public MapReduceTaskData(String taskId, String taskAttemptId) {
+    this._taskId = taskId;
+    this._attemptId = taskAttemptId;
+  }
+
+  public void setTimeAndCounter(long[] time, MapReduceCounterData counterHolder) {
+    this._totalTimeMs = time[0];
+    this._shuffleTimeMs = time[1];
+    this._sortTimeMs = time[2];
+    this._startTimeMs = time[3];
+    this._finishTimeMs = time[4];
+    this._counterHolder = counterHolder;
+    this._isTimeAndCounterDataPresent = true;
+  }
+
+  public MapReduceCounterData getCounters() {
+    return _counterHolder;
+  }
+
+  public long getTotalRunTimeMs() {
+    return _totalTimeMs;
+  }
+
+  public long getCodeExecutionTimeMs() {
+    return _totalTimeMs - _shuffleTimeMs - _sortTimeMs;
+  }
+
+  public long getShuffleTimeMs() {
+    return _shuffleTimeMs;
+  }
+
+  public long getSortTimeMs() {
+    return _sortTimeMs;
+  }
+
+  public long getStartTimeMs() {
+    return _startTimeMs;
+  }
+
+  public long getFinishTimeMs() {
+    return _finishTimeMs;
+  }
+
+  public boolean isTimeAndCounterDataPresent() {
+    return _isTimeAndCounterDataPresent;
+  }
+
+  public String getTaskId() {
+    return _taskId;
+  }
+
+  public String getAttemptId() {
+    return _attemptId;
+  }
+}
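The long[] accepted by setTimeAndCounter is positional: {totalTimeMs, shuffleTimeMs, sortTimeMs, startTimeMs, finishTimeMs}, matching what getTaskExecTime in MapReduceFSFetcherHadoop2 below produces. A hypothetical reducer record:

import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;

public class TaskDataDemo {
  public static void main(String[] args) {
    // Ids and times are made up.
    MapReduceTaskData reducer = new MapReduceTaskData(
        "task_1461566847127_84624_r_000000", "attempt_1461566847127_84624_r_000000_0");
    // {total, shuffle, sort, start, finish} in milliseconds.
    long[] time = {100000L, 30000L, 20000L, 1461867601000L, 1461867701000L};
    reducer.setTimeAndCounter(time, new MapReduceCounterData());
    System.out.println(reducer.getCodeExecutionTimeMs()); // 100000 - 30000 - 20000 = 50000
  }
}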
diff --git a/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFSFetcherHadoop2.java b/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFSFetcherHadoop2.java
new file mode 100644
index 000000000..1c5904b9e
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFSFetcherHadoop2.java
@@ -0,0 +1,355 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.fetchers;
+
+import com.linkedin.drelephant.analysis.AnalyticJob;
+import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.util.Utils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.TimeZone;
+
+/**
+ * This class implements the Fetcher for MapReduce Applications on Hadoop2.
+ * Instead of fetching data from the job history server, it retrieves history logs and job configs from
+ * HDFS directly. Each job's data consists of a JSON event log file with extension ".jhist" and an
+ * XML job configuration file.
+ */
+public class MapReduceFSFetcherHadoop2 extends MapReduceFetcher {
+  private static final Logger logger = Logger.getLogger(MapReduceFSFetcherHadoop2.class);
+
+  private static final String LOG_SIZE_XML_FIELD = "history_log_size_limit_in_mb";
+  private static final String HISTORY_SERVER_TIME_ZONE_XML_FIELD = "history_server_time_zone";
+  private static final String TIMESTAMP_DIR_FORMAT = "%04d" + File.separator + "%02d" + File.separator + "%02d";
+  private static final int SERIAL_NUMBER_DIRECTORY_DIGITS = 6;
+  protected static final double DEFAULT_MAX_LOG_SIZE_IN_MB = 500;
+
+  private FileSystem _fs;
+  private String _historyLocation;
+  private String _intermediateHistoryLocation;
+  private double _maxLogSizeInMB;
+  private TimeZone _timeZone;
+
+  public MapReduceFSFetcherHadoop2(FetcherConfigurationData fetcherConfData) throws IOException {
+    super(fetcherConfData);
+
+    _maxLogSizeInMB = DEFAULT_MAX_LOG_SIZE_IN_MB;
+    if (fetcherConfData.getParamMap().get(LOG_SIZE_XML_FIELD) != null) {
+      double[] logLimitSize = Utils.getParam(fetcherConfData.getParamMap().get(LOG_SIZE_XML_FIELD), 1);
+      if (logLimitSize != null) {
+        _maxLogSizeInMB = logLimitSize[0];
+      }
+    }
+    logger.info("The history log limit of MapReduce application is set to " + _maxLogSizeInMB + " MB");
+
+    String timeZoneStr = fetcherConfData.getParamMap().get(HISTORY_SERVER_TIME_ZONE_XML_FIELD);
+    _timeZone = timeZoneStr == null ? TimeZone.getDefault() : TimeZone.getTimeZone(timeZoneStr);
+    logger.info("Using timezone: " + _timeZone.getID());
+
+    Configuration conf = new Configuration();
+    this._fs = FileSystem.get(conf);
+    this._historyLocation = conf.get("mapreduce.jobhistory.done-dir");
+    this._intermediateHistoryLocation = conf.get("mapreduce.jobhistory.intermediate-done-dir");
+    logger.info("Intermediate history dir: " + _intermediateHistoryLocation);
+    logger.info("History done dir: " + _historyLocation);
+  }
+
+  public String getHistoryLocation() {
+    return _historyLocation;
+  }
+
+  public double getMaxLogSizeInMB() {
+    return _maxLogSizeInMB;
+  }
+
+  public TimeZone getTimeZone() {
+    return _timeZone;
+  }
+  /**
+   * The location of a job history file is in format: {done-dir}/yyyy/mm/dd/{serialPart}.
+   * yyyy/mm/dd is the year, month and date of the finish time.
+   * serialPart is the first 6 digits of the serial number considering it as a 9 digits number.
+   * PS: The serial number is the last part of an app id.
+   * <p>
+   * For example, if appId = application_1461566847127_84624, then the serial number is 84624.
+   * Considered as a 9 digits number, the serial number is 000084624, so the corresponding
+   * serialPart is 000084. If this application finishes at 2016-5-30, its history file will be
+   * located at {done-dir}/2016/05/30/000084.
+   * </p>
+   * <p>
+   * Furthermore, this location format is only satisfied for finished jobs in {done-dir} and not
+   * for running jobs in {intermediate-done-dir}.
+   * </p>
+   */
+  protected String getHistoryDir(AnalyticJob job) {
+    // generate the date part
+    Calendar timestamp = Calendar.getInstance(_timeZone);
+    timestamp.setTimeInMillis(job.getFinishTime());
+    String datePart = String.format(TIMESTAMP_DIR_FORMAT,
+        timestamp.get(Calendar.YEAR),
+        timestamp.get(Calendar.MONTH) + 1,
+        timestamp.get(Calendar.DAY_OF_MONTH));
+
+    // generate the serial part
+    String appId = job.getAppId();
+    int serialNumber = Integer.parseInt(appId.substring(appId.lastIndexOf('_') + 1));
+    String serialPart = String.format("%09d", serialNumber)
+        .substring(0, SERIAL_NUMBER_DIRECTORY_DIGITS);
+
+    return StringUtils.join(new String[]{_historyLocation, datePart, serialPart, ""}, File.separator);
+  }
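A worked version of the javadoc example above, showing how the serialPart falls out of the appId (values taken straight from the javadoc):

public class SerialPartDemo {
  public static void main(String[] args) {
    // Serial number 84624 -> zero-padded 000084624 -> first 6 digits 000084.
    String appId = "application_1461566847127_84624";
    int serialNumber = Integer.parseInt(appId.substring(appId.lastIndexOf('_') + 1));
    String serialPart = String.format("%09d", serialNumber).substring(0, 6);
    System.out.println(serialPart); // prints 000084
  }
}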
+
+    // Check whether the job history file is too large and should be throttled
+    if (_fs.getFileStatus(new Path(histFile)).getLen() > _maxLogSizeInMB * FileUtils.ONE_MB) {
+      String errMsg = "The history log of MapReduce application: " + appId + " is over the size limit of "
+          + _maxLogSizeInMB + " MB; the parsing process gets throttled.";
+      logger.warn(errMsg);
+      jobData.setDiagnosticInfo(errMsg);
+      jobData.setSucceeded(false);  // set succeeded to false to avoid heuristic analysis
+      return jobData;
+    }
+
+    // Analyze job history file
+    JobHistoryParser parser = new JobHistoryParser(_fs, histFile);
+    JobHistoryParser.JobInfo jobInfo = parser.parse();
+    IOException parseException = parser.getParseException();
+    if (parseException != null) {
+      throw new RuntimeException("Could not parse history file " + histFile, parseException);
+    }
+
+    jobData.setSubmitTime(jobInfo.getSubmitTime());
+    jobData.setStartTime(jobInfo.getLaunchTime());
+    jobData.setFinishTime(jobInfo.getFinishTime());
+
+    String state = jobInfo.getJobStatus();
+    if (state.equals("SUCCEEDED")) {
+
+      jobData.setSucceeded(true);
+
+      // Fetch job counter
+      MapReduceCounterData jobCounter = getCounterData(jobInfo.getTotalCounters());
+
+      // Fetch task data
+      Map<TaskID, JobHistoryParser.TaskInfo> allTasks = jobInfo.getAllTasks();
+      List<JobHistoryParser.TaskInfo> mapperInfoList = new ArrayList<JobHistoryParser.TaskInfo>();
+      List<JobHistoryParser.TaskInfo> reducerInfoList = new ArrayList<JobHistoryParser.TaskInfo>();
+      for (JobHistoryParser.TaskInfo taskInfo : allTasks.values()) {
+        if (taskInfo.getTaskType() == TaskType.MAP) {
+          mapperInfoList.add(taskInfo);
+        } else {
+          reducerInfoList.add(taskInfo);
+        }
+      }
+      if (jobInfo.getTotalMaps() > MAX_SAMPLE_SIZE) {
+        logger.debug(jobId + " total mappers: " + mapperInfoList.size());
+      }
+      if (jobInfo.getTotalReduces() > MAX_SAMPLE_SIZE) {
+        logger.debug(jobId + " total reducers: " + reducerInfoList.size());
+      }
+      MapReduceTaskData[] mapperList = getTaskData(jobId, mapperInfoList);
+      MapReduceTaskData[] reducerList = getTaskData(jobId, reducerInfoList);
+
+      jobData.setCounters(jobCounter).setMapperData(mapperList).setReducerData(reducerList);
+    } else if (state.equals("FAILED")) {
+
+      jobData.setSucceeded(false);
+      jobData.setDiagnosticInfo(jobInfo.getErrorInfo());
+    } else {
+      // Should not reach here
+      throw new RuntimeException("Job state not supported. Should be either SUCCEEDED or FAILED");
+    }
+
+    return jobData;
+  }
+
+  private MapReduceCounterData getCounterData(Counters counters) {
+    MapReduceCounterData holder = new MapReduceCounterData();
+    for (CounterGroup group : counters) {
+      String groupName = group.getName();
+      for (Counter counter : group) {
+        holder.set(groupName, counter.getName(), counter.getValue());
+      }
+    }
+    return holder;
+  }
+
+  private long[] getTaskExecTime(JobHistoryParser.TaskAttemptInfo attemptInfo) {
+    long startTime = attemptInfo.getStartTime();
+    long finishTime = attemptInfo.getFinishTime();
+    boolean isMapper = (attemptInfo.getTaskType() == TaskType.MAP);
+
+    long[] time;
+    if (isMapper) {
+      time = new long[]{finishTime - startTime, 0, 0, startTime, finishTime};
+    } else {
+      long shuffleFinishTime = attemptInfo.getShuffleFinishTime();
+      long mergeFinishTime = attemptInfo.getSortFinishTime();
+      time = new long[]{finishTime - startTime, shuffleFinishTime - startTime,
+          mergeFinishTime - shuffleFinishTime, startTime, finishTime};
+    }
+    return time;
+  }
+
+  protected MapReduceTaskData[] getTaskData(String jobId, List<JobHistoryParser.TaskInfo> infoList) {
+    int sampleSize = sampleAndGetSize(jobId, infoList);
+
+    List<MapReduceTaskData> taskList = new ArrayList<MapReduceTaskData>();
+    for (int i = 0; i < sampleSize; i++) {
+      JobHistoryParser.TaskInfo tInfo = infoList.get(i);
+      if (!"SUCCEEDED".equals(tInfo.getTaskStatus())) {
+        logger.info(String.format("Skipped a failed task of %s: %s", jobId, tInfo.getTaskId().toString()));
+        continue;
+      }
+
+      String taskId = tInfo.getTaskId().toString();
+      TaskAttemptID attemptId = tInfo.getSuccessfulAttemptId();
+      MapReduceTaskData taskData = new MapReduceTaskData(taskId, attemptId.toString());
+
+      MapReduceCounterData taskCounterData = getCounterData(tInfo.getCounters());
+      long[] taskExecTime = getTaskExecTime(tInfo.getAllTaskAttempts().get(attemptId));
+
+      taskData.setTimeAndCounter(taskExecTime, taskCounterData);
+      taskList.add(taskData);
+    }
+    return taskList.toArray(new MapReduceTaskData[taskList.size()]);
+  }
+
+  private class DataFiles {
+    private String jobConfPath;
+    private String jobHistPath;
+
+    public DataFiles(String confPath, String histPath) {
+      this.jobConfPath = confPath;
+      this.jobHistPath = histPath;
+    }
+
+    public String getJobConfPath() {
+      return jobConfPath;
+    }
+
+    public void setJobConfPath(String jobConfPath) {
+      this.jobConfPath = jobConfPath;
+    }
+
+    public String getJobHistPath() {
+      return jobHistPath;
+    }
+
+    public void setJobHistPath(String jobHistPath) {
+      this.jobHistPath = jobHistPath;
+    }
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcher.java b/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcher.java
new file mode 100644
index 000000000..83b7aef4f
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcher.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.fetchers;
+
+import com.linkedin.drelephant.analysis.ElephantFetcher;
+import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import org.apache.log4j.Logger;
+
+import java.util.Collections;
+import java.util.List;
+
+
+public abstract class MapReduceFetcher implements ElephantFetcher<MapReduceApplicationData> {
+  private static final Logger logger = Logger.getLogger(MapReduceFetcher.class);
+  protected static final int MAX_SAMPLE_SIZE = 200;
+  protected static final String SAMPLING_ENABLED_XML_FIELD = "sampling_enabled";
+
+  protected FetcherConfigurationData _fetcherConfigurationData;
+  private boolean _samplingEnabled;
+
+  public MapReduceFetcher(FetcherConfigurationData fetcherConfData) {
+    this._fetcherConfigurationData = fetcherConfData;
+    this._samplingEnabled = Boolean.parseBoolean(
+        fetcherConfData.getParamMap().get(SAMPLING_ENABLED_XML_FIELD));
+  }
+
+  protected int sampleAndGetSize(String jobId, List<?> taskList) {
+    // check if sampling is enabled
+    if (_samplingEnabled) {
+      if (taskList.size() > MAX_SAMPLE_SIZE) {
+        logger.info(jobId + " needs sampling.");
+        Collections.shuffle(taskList);
+      }
+      return Math.min(taskList.size(), MAX_SAMPLE_SIZE);
+    }
+    return taskList.size();
+  }
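+
+  // Example (illustrative): with sampling_enabled=true and 1,000 tasks, the list is shuffled
+  // in place and only the first MAX_SAMPLE_SIZE (200) tasks are analyzed.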
+
+  public boolean isSamplingEnabled() {
+    return _samplingEnabled;
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcherHadoop2.java b/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcherHadoop2.java
new file mode 100644
index 000000000..4fa89d922
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcherHadoop2.java
@@ -0,0 +1,436 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.fetchers;
+
+import com.linkedin.drelephant.analysis.AnalyticJob;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData;
+import com.linkedin.drelephant.util.Utils;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.log4j.Logger;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+
+
+/**
+ * This class implements the Fetcher for MapReduce Applications on Hadoop2
+ */
+public class MapReduceFetcherHadoop2 extends MapReduceFetcher {
+  private static final Logger logger = Logger.getLogger(MapReduceFetcherHadoop2.class);
+  // We allow a one-minute job fetch delay to account for the lag in job files reaching the
+  // JobHistoryServer HDFS from the AM/NM.
+
+  private URLFactory _urlFactory;
+  private JSONFactory _jsonFactory;
+  private String _jhistoryWebAddr;
+
+  public MapReduceFetcherHadoop2(FetcherConfigurationData fetcherConfData) throws IOException {
+    super(fetcherConfData);
+
+    final String jhistoryAddr = new Configuration().get("mapreduce.jobhistory.webapp.address");
+
+    logger.info("Connecting to the job history server at " + jhistoryAddr + "...");
+    _urlFactory = new URLFactory(jhistoryAddr);
+    logger.info("Connection successful.");
+
+    _jsonFactory = new JSONFactory();
+    _jhistoryWebAddr = "http://" + jhistoryAddr + "/jobhistory/job/";
+  }
+
+  @Override
+  public MapReduceApplicationData fetchData(AnalyticJob analyticJob) throws IOException, AuthenticationException {
+    String appId = analyticJob.getAppId();
+    MapReduceApplicationData jobData = new MapReduceApplicationData();
+    String jobId = Utils.getJobIdFromApplicationId(appId);
+    jobData.setAppId(appId).setJobId(jobId);
+    // Change job tracking url to job history page
+    analyticJob.setTrackingUrl(_jhistoryWebAddr + jobId);
+    try {
+
+      // Fetch job config
+      Properties jobConf = _jsonFactory.getProperties(_urlFactory.getJobConfigURL(jobId));
+      jobData.setJobConf(jobConf);
+
+      URL jobURL = _urlFactory.getJobURL(jobId);
+      String state = _jsonFactory.getState(jobURL);
+
+      jobData.setSubmitTime(_jsonFactory.getSubmitTime(jobURL));
+      jobData.setStartTime(_jsonFactory.getStartTime(jobURL));
+      jobData.setFinishTime(_jsonFactory.getFinishTime(jobURL));
+
+      if (state.equals("SUCCEEDED")) {
+
+        jobData.setSucceeded(true);
+
+        // Fetch job counter
+        MapReduceCounterData jobCounter = _jsonFactory.getJobCounter(_urlFactory.getJobCounterURL(jobId));
+
+        // Fetch task data
+        URL taskListURL = _urlFactory.getTaskListURL(jobId);
+        List<MapReduceTaskData> mapperList = new ArrayList<MapReduceTaskData>();
+        List<MapReduceTaskData> reducerList = new ArrayList<MapReduceTaskData>();
+        _jsonFactory.getTaskDataAll(taskListURL, jobId, mapperList, reducerList);
+
+        MapReduceTaskData[] mapperData = mapperList.toArray(new MapReduceTaskData[mapperList.size()]);
+        MapReduceTaskData[] reducerData = reducerList.toArray(new MapReduceTaskData[reducerList.size()]);
+
+        jobData.setCounters(jobCounter).setMapperData(mapperData).setReducerData(reducerData);
+      } else if (state.equals("FAILED")) {
+
+        jobData.setSucceeded(false);
+        String diagnosticInfo;
+        try {
+          diagnosticInfo = parseException(jobData.getJobId(), _jsonFactory.getDiagnosticInfo(jobURL));
+        } catch (Exception e) {
+          diagnosticInfo = null;
+        }
+        jobData.setDiagnosticInfo(diagnosticInfo);
+      } else {
+        // Should not reach here
+        throw new RuntimeException("Job state not supported. Should be either SUCCEEDED or FAILED");
+      }
+    } finally {
+      ThreadContextMR2.updateAuthToken();
+    }
+
+    return jobData;
+  }
+
+  private String parseException(String jobId, String diagnosticInfo) throws MalformedURLException, IOException,
+      AuthenticationException {
+    Matcher m = ThreadContextMR2.getDiagnosticMatcher(diagnosticInfo);
+    if (m.matches()) {
+      if (Integer.parseInt(m.group(2)) == 0) {
+        // This is due to a bug in Hadoop 2.3 that was fixed in 2.4
+        throw new RuntimeException("Error in diagnosticInfo");
+      }
+      String taskId = m.group(1);
+      logger.debug("Parse succeeded. Task: " + m.group(1) + " failed " + m.group(2) + " times.");
+      return _jsonFactory.getTaskFailedStackTrace(_urlFactory.getTaskAllAttemptsURL(jobId, taskId));
+    }
+    logger.info("Diagnostic info does not match the expected pattern.");
+    // Diagnostic info not present in the job. Usually due to an exception during AM setup
+    throw new RuntimeException("Insufficient diagnostic info");
+  }
+
+  private URL getTaskCounterURL(String jobId, String taskId) throws MalformedURLException {
+    return _urlFactory.getTaskCounterURL(jobId, taskId);
+  }
+
+  private URL getTaskAttemptURL(String jobId, String taskId, String attemptId) throws MalformedURLException {
+    return _urlFactory.getTaskAttemptURL(jobId, taskId, attemptId);
+  }
+
+  private class URLFactory {
+
+    private String _restRoot;
+
+    private URLFactory(String hserverAddr) throws IOException {
+      _restRoot = "http://" + hserverAddr + "/ws/v1/history/mapreduce/jobs";
+      verifyURL(_restRoot);
+    }
+
+    private void verifyURL(String url) throws IOException {
+      final URLConnection connection = new URL(url).openConnection();
+      // Check service availability
+      connection.connect();
+    }
+
+    private URL getJobURL(String jobId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId);
+    }
+
+    private URL getJobConfigURL(String jobId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId + "/conf");
+    }
+
+    private URL getJobCounterURL(String jobId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId + "/counters");
+    }
+
+    private URL getTaskListURL(String jobId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId + "/tasks");
+    }
+
+    private URL getTaskCounterURL(String jobId, String taskId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId + "/tasks/" + taskId + "/counters");
+    }
+
+    private URL getTaskAllAttemptsURL(String jobId, String taskId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId + "/tasks/" + taskId + "/attempts");
+    }
+
+    private URL getTaskAttemptURL(String jobId, String taskId, String attemptId) throws MalformedURLException {
+      return new URL(_restRoot + "/" + jobId + "/tasks/" + taskId + "/attempts/" + attemptId);
+    }
+  }
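+
+  // Example REST endpoints produced by URLFactory (host, port and job ID are illustrative):
+  //   job:      http://jhs.example.com:19888/ws/v1/history/mapreduce/jobs/job_1463048788163_0001
+  //   counters: http://jhs.example.com:19888/ws/v1/history/mapreduce/jobs/job_1463048788163_0001/counters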
+
+  private class JSONFactory {
+
+    private long getStartTime(URL url) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      return rootNode.path("job").path("startTime").getValueAsLong();
+    }
+
+    private long getFinishTime(URL url) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      return rootNode.path("job").path("finishTime").getValueAsLong();
+    }
+
+    private long getSubmitTime(URL url) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      return rootNode.path("job").path("submitTime").getValueAsLong();
+    }
+
+    private String getState(URL url) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      return rootNode.path("job").path("state").getValueAsText();
+    }
+
+    private String getDiagnosticInfo(URL url) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      String diag = rootNode.path("job").path("diagnostics").getValueAsText();
+      return diag;
+    }
+
+    private Properties getProperties(URL url) throws IOException, AuthenticationException {
+      Properties jobConf = new Properties();
+
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      JsonNode configs = rootNode.path("conf").path("property");
+
+      for (JsonNode conf : configs) {
+        String key = conf.get("name").getValueAsText();
+        String val = conf.get("value").getValueAsText();
+        jobConf.setProperty(key, val);
+      }
+      return jobConf;
+    }
+
+    private MapReduceCounterData getJobCounter(URL url) throws IOException, AuthenticationException {
+      MapReduceCounterData holder = new MapReduceCounterData();
+
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      JsonNode groups = rootNode.path("jobCounters").path("counterGroup");
+
+      for (JsonNode group : groups) {
+        for (JsonNode counter : group.path("counter")) {
+          String counterName = counter.get("name").getValueAsText();
+          Long counterValue = counter.get("totalCounterValue").getLongValue();
+          String groupName = group.get("counterGroupName").getValueAsText();
+          holder.set(groupName, counterName, counterValue);
+        }
+      }
+      return holder;
+    }
+
+    private MapReduceCounterData getTaskCounter(URL url) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      JsonNode groups = rootNode.path("jobTaskCounters").path("taskCounterGroup");
+      MapReduceCounterData holder = new MapReduceCounterData();
+
+      for (JsonNode group : groups) {
+        for (JsonNode counter : group.path("counter")) {
+          String name = counter.get("name").getValueAsText();
+          String groupName = group.get("counterGroupName").getValueAsText();
+          Long value = counter.get("value").getLongValue();
+          holder.set(groupName, name, value);
+        }
+      }
+      return holder;
+    }
+
+    private long[] getTaskExecTime(URL url) throws IOException, AuthenticationException {
+
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      JsonNode taskAttempt = rootNode.path("taskAttempt");
+
+      long startTime = taskAttempt.get("startTime").getLongValue();
+      long finishTime = taskAttempt.get("finishTime").getLongValue();
+      boolean isMapper = taskAttempt.get("type").getValueAsText().equals("MAP");
+
+      long[] time;
+      if (isMapper) {
+        // No shuffle/sort time for mappers
+        time = new long[] { finishTime - startTime, 0, 0, startTime, finishTime };
+      } else {
+        long shuffleTime = taskAttempt.get("elapsedShuffleTime").getLongValue();
+        long sortTime = taskAttempt.get("elapsedMergeTime").getLongValue();
+        time = new long[] { finishTime - startTime, shuffleTime, sortTime, startTime, finishTime };
+      }
+
+      return time;
+    }
+
+    private void getTaskDataAll(URL url, String jobId, List<MapReduceTaskData> mapperList,
+        List<MapReduceTaskData> reducerList) throws IOException, AuthenticationException {
+
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(url);
+      JsonNode tasks = rootNode.path("tasks").path("task");
+
+      for (JsonNode task : tasks) {
+        String state = task.get("state").getValueAsText();
+        if (!state.equals("SUCCEEDED")) {
+          // This is a failed task.
+          continue;
+        }
+        String taskId = task.get("id").getValueAsText();
+        String attemptId = task.get("successfulAttempt").getValueAsText();
+        boolean isMapper = task.get("type").getValueAsText().equals("MAP");
+
+        if (isMapper) {
+          mapperList.add(new MapReduceTaskData(taskId, attemptId));
+        } else {
+          reducerList.add(new MapReduceTaskData(taskId, attemptId));
+        }
+      }
+
+      getTaskData(jobId, mapperList);
+      getTaskData(jobId, reducerList);
+    }
+
+    private void getTaskData(String jobId, List<MapReduceTaskData> taskList) throws IOException, AuthenticationException {
+
+      int sampleSize = sampleAndGetSize(jobId, taskList);
+
+      for (int i = 0; i < sampleSize; i++) {
+        MapReduceTaskData data = taskList.get(i);
+
+        URL taskCounterURL = getTaskCounterURL(jobId, data.getTaskId());
+        MapReduceCounterData taskCounter = getTaskCounter(taskCounterURL);
+
+        URL taskAttemptURL = getTaskAttemptURL(jobId, data.getTaskId(), data.getAttemptId());
+        long[] taskExecTime = getTaskExecTime(taskAttemptURL);
+
+        data.setTimeAndCounter(taskExecTime, taskCounter);
+      }
+    }
+
+    private String getTaskFailedStackTrace(URL taskAllAttemptsUrl) throws IOException, AuthenticationException {
+      JsonNode rootNode = ThreadContextMR2.readJsonNode(taskAllAttemptsUrl);
+      JsonNode tasks = rootNode.path("taskAttempts").path("taskAttempt");
+      for (JsonNode task : tasks) {
+        String state = task.get("state").getValueAsText();
+        if (!state.equals("FAILED")) {
+          continue;
+        }
+        String stacktrace = task.get("diagnostics").getValueAsText();
+        if (stacktrace.startsWith("Error:")) {
+          return stacktrace;
+        } else {
+          // This is not a valid stacktrace. Might be due to a bug in Hadoop 2.3 that was fixed in 2.4
+          throw new RuntimeException("This is not a valid stack trace.");
+        }
+      }
+      throw new RuntimeException("No failed task attempt in this failed task.");
+    }
+  }
+}
+
+final class ThreadContextMR2 {
+  private static final Logger logger = Logger.getLogger(ThreadContextMR2.class);
+  private static final AtomicInteger THREAD_ID = new AtomicInteger(1);
+
+  private static final ThreadLocal<Integer> _LOCAL_THREAD_ID = new ThreadLocal<Integer>() {
+    @Override
+    public Integer initialValue() {
+      return THREAD_ID.getAndIncrement();
+    }
+  };
+
+  private static final ThreadLocal<Long> _LOCAL_LAST_UPDATED = new ThreadLocal<Long>();
+  private static final ThreadLocal<Long> _LOCAL_UPDATE_INTERVAL = new ThreadLocal<Long>();
+
+  private static final ThreadLocal<Pattern> _LOCAL_DIAGNOSTIC_PATTERN = new ThreadLocal<Pattern>() {
+    @Override
+    public Pattern initialValue() {
+      // Example: "Task task_1443068695259_9143_m_000475 failed 1 times"
+      return Pattern.compile(
+          "Task[\\s\\u00A0]+(.*)[\\s\\u00A0]+failed[\\s\\u00A0]+([0-9])[\\s\\u00A0]+times[\\s\\u00A0]+");
+    }
+  };
+
+  private static final ThreadLocal<AuthenticatedURL.Token> _LOCAL_AUTH_TOKEN =
+      new ThreadLocal<AuthenticatedURL.Token>() {
+        @Override
+        public AuthenticatedURL.Token initialValue() {
+          _LOCAL_LAST_UPDATED.set(System.currentTimeMillis());
+          // Randomize the interval for each executor so they do not all update the token at the same time
+          _LOCAL_UPDATE_INTERVAL.set(Statistics.MINUTE_IN_MS * 30 + new Random().nextLong()
+              % (3 * Statistics.MINUTE_IN_MS));
+          logger.info("Executor " + _LOCAL_THREAD_ID.get() + " update interval " + _LOCAL_UPDATE_INTERVAL.get() * 1.0
+              / Statistics.MINUTE_IN_MS);
+          return new AuthenticatedURL.Token();
+        }
+      };
+
+  private static final ThreadLocal<AuthenticatedURL> _LOCAL_AUTH_URL = new ThreadLocal<AuthenticatedURL>() {
+    @Override
+    public AuthenticatedURL initialValue() {
+      return new AuthenticatedURL();
+    }
+  };
+
+  private static final ThreadLocal<ObjectMapper> _LOCAL_MAPPER = new ThreadLocal<ObjectMapper>() {
+    @Override
+    public ObjectMapper initialValue() {
+      return new ObjectMapper();
+    }
+  };
+
+  private ThreadContextMR2() {
+    // Empty on purpose
+  }
+
+  public static Matcher getDiagnosticMatcher(String diagnosticInfo) {
+    return _LOCAL_DIAGNOSTIC_PATTERN.get().matcher(diagnosticInfo);
+  }
+
+  public static JsonNode readJsonNode(URL url) throws IOException, AuthenticationException {
+    HttpURLConnection conn = _LOCAL_AUTH_URL.get().openConnection(url, _LOCAL_AUTH_TOKEN.get());
+    return _LOCAL_MAPPER.get().readTree(conn.getInputStream());
+  }
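+
+  // The update interval is randomized per executor thread (30 minutes plus or minus up to
+  // three minutes, see _LOCAL_UPDATE_INTERVAL above) so that the executors do not all renew
+  // their tokens against the JobHistoryServer at the same moment.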
+
+  public static void updateAuthToken() {
+    long curTime = System.currentTimeMillis();
+    if (curTime - _LOCAL_LAST_UPDATED.get() > _LOCAL_UPDATE_INTERVAL.get()) {
+      logger.info("Executor " + _LOCAL_THREAD_ID.get() + " updates its AuthenticatedToken.");
+      _LOCAL_AUTH_TOKEN.set(new AuthenticatedURL.Token());
+      _LOCAL_AUTH_URL.set(new AuthenticatedURL());
+      _LOCAL_LAST_UPDATED.set(curTime);
+    }
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/DistributedCacheLimitHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/DistributedCacheLimitHeuristic.java
new file mode 100644
index 000000000..671e321ad
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/heuristics/DistributedCacheLimitHeuristic.java
@@ -0,0 +1,157 @@
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+
+/**
+ * This rule flags jobs that put more than 500 MB of files in the distributed cache.
+ */
+public class DistributedCacheLimitHeuristic implements Heuristic<MapReduceApplicationData> {
+  private static final Logger logger = Logger.getLogger(DistributedCacheLimitHeuristic.class);
+  private static final String DISTRIBUTED_CACHE_FILE_SIZE_LIMIT_CONF = "distributed.cache.file.size.limit";
+  private static final String MAPREDUCE_JOB_CACHE_FILES_FILESIZES = "mapreduce.job.cache.files.filesizes";
+  private static final String MAPREDUCE_JOB_CACHE_ARCHIVES_FILESIZES = "mapreduce.job.cache.archives.filesizes";
+  private static final String MAPREDUCE_JOB_CACHE_FILES = "mapreduce.job.cache.files";
+  private static final String MAPREDUCE_JOB_CACHE_ARCHIVES = "mapreduce.job.cache.archives";
+  private static long distributedCacheFileSizeLimit = 500 * FileUtils.ONE_MB; // 500 MB default
+  private HeuristicConfigurationData _heuristicConfData;
+
+  public DistributedCacheLimitHeuristic(HeuristicConfigurationData heuristicConfData) {
+    this._heuristicConfData = heuristicConfData;
+    loadParameters();
+  }
+
+  private void loadParameters() {
+    Map<String, String> paramMap = _heuristicConfData.getParamMap();
+    String heuristicName = _heuristicConfData.getHeuristicName();
+
+    String cacheLimit = paramMap.get(DISTRIBUTED_CACHE_FILE_SIZE_LIMIT_CONF);
+    if (cacheLimit != null) {
+      try {
+        distributedCacheFileSizeLimit = Long.parseLong(cacheLimit);
+        logger.info(
+            heuristicName + " will use " + DISTRIBUTED_CACHE_FILE_SIZE_LIMIT_CONF + " with the following setting: "
+                + distributedCacheFileSizeLimit);
+      } catch (NumberFormatException e) {
+        logger.warn("Error parsing " + DISTRIBUTED_CACHE_FILE_SIZE_LIMIT_CONF + " from the conf file. "
+            + "Check for typos...", e);
+      }
+    }
+  }
+
+  @Override
+  public HeuristicResult apply(MapReduceApplicationData data) {
+    if (data == null || !data.getSucceeded()) {
+      return null;
+    }
+
+    Properties jobConf = data.getConf();
+    String cacheFiles = jobConf.getProperty(MAPREDUCE_JOB_CACHE_FILES, null);
+    String cacheFileSizes = jobConf.getProperty(MAPREDUCE_JOB_CACHE_FILES_FILESIZES, null);
+
+    HeuristicResult result = null;
+
+    if (cacheFiles != null && cacheFileSizes != null) {
+      result =
+          new HeuristicResult(_heuristicConfData.getClassName(), _heuristicConfData.getHeuristicName(), Severity.NONE,
+              0);
+      List<String> cacheFilesList = new ArrayList<String>(Arrays.asList(cacheFiles.split(",")));
+      List<String> cacheFileSizesList = new ArrayList<String>(Arrays.asList(cacheFileSizes.split(",")));
+
+      int cacheFilesCount = cacheFilesList.size();
+      int cacheFileSizesCount = cacheFileSizesList.size();
+
+      if (cacheFilesCount != cacheFileSizesCount) {
+        result.setSeverity(Severity.MODERATE);
+        logger.warn("Mismatch in the number of files and their corresponding sizes for " + MAPREDUCE_JOB_CACHE_FILES);
+        result.addResultDetail(MAPREDUCE_JOB_CACHE_FILES, Integer.toString(cacheFilesCount));
+        result.addResultDetail(MAPREDUCE_JOB_CACHE_FILES_FILESIZES, Integer.toString(cacheFileSizesCount));
+        return result;
+      }
+
+      Map<String, String> cacheFileToSizeMap = new HashMap<String, String>();
+      for (int i = 0; i < cacheFilesCount; i++) {
+        cacheFileToSizeMap.put(cacheFilesList.get(i), cacheFileSizesList.get(i));
+      }
+
+      if (checkFileSizeLimit(result, cacheFileToSizeMap)) {
+        result.setSeverity(Severity.CRITICAL);
+      }
+    }
+
+    String archiveCacheFiles = jobConf.getProperty(MAPREDUCE_JOB_CACHE_ARCHIVES, null);
+    String archiveCacheFileSizes = jobConf.getProperty(MAPREDUCE_JOB_CACHE_ARCHIVES_FILESIZES, null);
+
+    if (archiveCacheFiles != null && archiveCacheFileSizes != null) {
+
+      if (result == null) {
+        result =
+            new HeuristicResult(_heuristicConfData.getClassName(), _heuristicConfData.getHeuristicName(),
+                Severity.NONE, 0);
+      }
+
+      List<String> archiveCacheFilesList = new ArrayList<String>(Arrays.asList(archiveCacheFiles.split(",")));
+      List<String> archiveCacheFileSizesList = new ArrayList<String>(Arrays.asList(archiveCacheFileSizes.split(",")));
+
+      int archiveCacheFilesCount = archiveCacheFilesList.size();
+      int archiveCacheFileSizesCount = archiveCacheFileSizesList.size();
+
+      if (archiveCacheFilesCount != archiveCacheFileSizesCount) {
+        result.setSeverity(Severity.MODERATE);
+        logger
+            .warn("Mismatch in the number of files and their corresponding sizes for " + MAPREDUCE_JOB_CACHE_ARCHIVES);
+        result.addResultDetail(MAPREDUCE_JOB_CACHE_ARCHIVES, Integer.toString(archiveCacheFilesCount));
+        result.addResultDetail(MAPREDUCE_JOB_CACHE_ARCHIVES_FILESIZES, Integer.toString(archiveCacheFileSizesCount));
+        return result;
+      }
+
+      Map<String, String> archiveCacheFileToSizeMap = new HashMap<String, String>();
+      for (int i = 0; i < archiveCacheFilesCount; i++) {
+        archiveCacheFileToSizeMap.put(archiveCacheFilesList.get(i), archiveCacheFileSizesList.get(i));
+      }
+
+      if (checkFileSizeLimit(result, archiveCacheFileToSizeMap)) {
+        result.setSeverity(Severity.CRITICAL);
+      }
+    }
+
+    return result;
+  }
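+
+  /**
+   * Adds one result detail per cache entry whose declared size exceeds
+   * distributedCacheFileSizeLimit. Returns true if any entry violates the limit.
+   */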
+  private boolean checkFileSizeLimit(HeuristicResult result, Map<String, String> cacheFileToSizeMap) {
+    boolean limitViolated = false;
+    for (String file : cacheFileToSizeMap.keySet()) {
+      long size = 0;
+      try {
+        size = Long.parseLong(cacheFileToSizeMap.get(file));
+      } catch (NumberFormatException e) {
+        logger.warn("Unable to parse file size value: " + cacheFileToSizeMap.get(file) + " for file: " + file);
+      }
+
+      if (size > distributedCacheFileSizeLimit) {
+        limitViolated = true;
+        result.addResultDetail(file, Long.toString(size));
+      }
+    }
+    return limitViolated;
+  }
+
+  @Override
+  public HeuristicConfigurationData getHeuristicConfData() {
+    return _heuristicConfData;
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/ExceptionHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/ExceptionHeuristic.java
new file mode 100644
index 000000000..b5cb7e52f
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/heuristics/ExceptionHeuristic.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+
+
+public class ExceptionHeuristic implements Heuristic<MapReduceApplicationData> {
+
+  private HeuristicConfigurationData _heuristicConfData;
+
+  public ExceptionHeuristic(HeuristicConfigurationData heuristicConfData) {
+    this._heuristicConfData = heuristicConfData;
+  }
+
+  @Override
+  public HeuristicConfigurationData getHeuristicConfData() {
+    return _heuristicConfData;
+  }
+
+  @Override
+  public HeuristicResult apply(MapReduceApplicationData data) {
+    if (data.getSucceeded()) {
+      return null;
+    }
+    HeuristicResult result = new HeuristicResult(
+        _heuristicConfData.getClassName(), _heuristicConfData.getHeuristicName(), Severity.MODERATE, 0);
+    String diagnosticInfo = data.getDiagnosticInfo();
+    if (diagnosticInfo != null) {
+      result.addResultDetail("Error", "Stacktrace", diagnosticInfo);
+    } else {
+      String msg = "Unable to find stacktrace info. Please find the real problem in the Jobhistory link above. "
+          + "Exceptions can happen either in the task log or in the Application Master log.";
+      result.addResultDetail("Error", msg);
+    }
+    return result;
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/GenericDataSkewHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/GenericDataSkewHeuristic.java
new file mode 100644
index 000000000..b46a2bcdb
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/heuristics/GenericDataSkewHeuristic.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.google.common.primitives.Longs;
+import com.linkedin.drelephant.analysis.HDFSContext;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import com.linkedin.drelephant.util.Utils;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+
+
+/**
+ * This Heuristic analyses the skewness in the task input data
+ */
+public abstract class GenericDataSkewHeuristic implements Heuristic<MapReduceApplicationData> {
+  private static final Logger logger = Logger.getLogger(GenericDataSkewHeuristic.class);
+
+  // Severity Parameters
+  private static final String NUM_TASKS_SEVERITY = "num_tasks_severity";
+  private static final String DEVIATION_SEVERITY = "deviation_severity";
+  private static final String FILES_SEVERITY = "files_severity";
+
+  // Default values of the parameters
+  private double[] numTasksLimits = {10, 50, 100, 200};   // Number of map or reduce tasks
+  private double[] deviationLimits = {2, 4, 8, 16};       // Deviation in input bytes between the two groups
+  private double[] filesLimits = {1d/8, 1d/4, 1d/2, 1d};  // Fraction of the HDFS block size
+
+  private MapReduceCounterData.CounterName _counterName;
+  private HeuristicConfigurationData _heuristicConfData;
+
+  private void loadParameters() {
+    Map<String, String> paramMap = _heuristicConfData.getParamMap();
+    String heuristicName = _heuristicConfData.getHeuristicName();
+
+    double[] confNumTasksThreshold = Utils.getParam(paramMap.get(NUM_TASKS_SEVERITY), numTasksLimits.length);
+    if (confNumTasksThreshold != null) {
+      numTasksLimits = confNumTasksThreshold;
+    }
+    logger.info(heuristicName + " will use " + NUM_TASKS_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(numTasksLimits));
+
+    double[] confDeviationThreshold = Utils.getParam(paramMap.get(DEVIATION_SEVERITY), deviationLimits.length);
+    if (confDeviationThreshold != null) {
+      deviationLimits = confDeviationThreshold;
+    }
+    logger.info(heuristicName + " will use " + DEVIATION_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(deviationLimits));
+
+    double[] confFilesThreshold = Utils.getParam(paramMap.get(FILES_SEVERITY), filesLimits.length);
+    if (confFilesThreshold != null) {
+      filesLimits = confFilesThreshold;
+    }
+    logger.info(heuristicName + " will use " + FILES_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(filesLimits));
+    for (int i = 0; i < filesLimits.length; i++) {
+      filesLimits[i] = filesLimits[i] * HDFSContext.HDFS_BLOCK_SIZE;
+    }
+  }
+
+  protected GenericDataSkewHeuristic(MapReduceCounterData.CounterName counterName,
+      HeuristicConfigurationData heuristicConfData) {
+    this._counterName = counterName;
+    this._heuristicConfData = heuristicConfData;
+
+    loadParameters();
+  }
+
+  protected abstract MapReduceTaskData[] getTasks(MapReduceApplicationData data);
+
+  @Override
+  public HeuristicConfigurationData getHeuristicConfData() {
+    return _heuristicConfData;
+  }
+
+  @Override
+  public HeuristicResult apply(MapReduceApplicationData data) {
+
+    if (!data.getSucceeded()) {
+      return null;
+    }
+
+    MapReduceTaskData[] tasks = getTasks(data);
+
+    // Gather data
+    List<Long> inputBytes = new ArrayList<Long>();
+
+    for (int i = 0; i < tasks.length; i++) {
+      if (tasks[i].isTimeAndCounterDataPresent()) {
+        inputBytes.add(tasks[i].getCounters().get(_counterName));
+      }
+    }
+
+    // Analyze data. TODO: This is a temp fix. findTwoGroups should support a list as input
+    long[][] groups = Statistics.findTwoGroups(Longs.toArray(inputBytes));
+
+    long avg1 = Statistics.average(groups[0]);
+    long avg2 = Statistics.average(groups[1]);
+
+    long min = Math.min(avg1, avg2);
+    long diff = Math.abs(avg2 - avg1);
+
+    Severity severity = getDeviationSeverity(min, diff);
+
+    // This reduces severity if the largest file sizes are insignificant
+    severity = Severity.min(severity, getFilesSeverity(avg2));
+
+    // This reduces severity if the number of tasks is insignificant
+    severity = Severity.min(severity, Severity.getSeverityAscending(
+        groups[0].length, numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]));
+
+    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
+        _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
+
+    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
+    result.addResultDetail("Group A", groups[0].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg1) + " avg");
+    result.addResultDetail("Group B", groups[1].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg2) + " avg");
+
+    return result;
+  }
+
+  private Severity getDeviationSeverity(long averageMin, long averageDiff) {
+    if (averageMin <= 0) {
+      averageMin = 1;
+    }
+    long value = averageDiff / averageMin;
+    return Severity.getSeverityAscending(
+        value, deviationLimits[0], deviationLimits[1], deviationLimits[2], deviationLimits[3]);
+  }
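+
+  // Worked example (illustrative): group averages of 100 MB and 900 MB give min = 100 MB and
+  // diff = 800 MB, so value = 8, which maps to SEVERE under the default deviation_severity
+  // thresholds {2, 4, 8, 16}.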
+
+  private Severity getFilesSeverity(long value) {
+    return Severity.getSeverityAscending(
+        value, filesLimits[0], filesLimits[1], filesLimits[2], filesLimits[3]);
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/GenericGCHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/GenericGCHeuristic.java
new file mode 100644
index 000000000..c9c6cf2f8
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/heuristics/GenericGCHeuristic.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import com.linkedin.drelephant.util.Utils;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+import java.util.Map;
+import org.apache.log4j.Logger;
+
+
+/**
+ * Analyses garbage collection efficiency
+ */
+public abstract class GenericGCHeuristic implements Heuristic<MapReduceApplicationData> {
+  private static final Logger logger = Logger.getLogger(GenericGCHeuristic.class);
+
+  // Severity Parameters
+  private static final String GC_RATIO_SEVERITY = "gc_ratio_severity";
+  private static final String RUNTIME_SEVERITY = "runtime_severity_in_min";
+
+  // Default values of the parameters
+  private double[] gcRatioLimits = {0.01d, 0.02d, 0.03d, 0.04d};  // Garbage collection time / CPU time
+  private double[] runtimeLimits = {5, 10, 12, 15};               // Task runtime in minutes (converted to ms below)
+
+  private HeuristicConfigurationData _heuristicConfData;
+
+  private void loadParameters() {
+    Map<String, String> paramMap = _heuristicConfData.getParamMap();
+    String heuristicName = _heuristicConfData.getHeuristicName();
+
+    double[] confGcRatioThreshold = Utils.getParam(paramMap.get(GC_RATIO_SEVERITY), gcRatioLimits.length);
+    if (confGcRatioThreshold != null) {
+      gcRatioLimits = confGcRatioThreshold;
+    }
+    logger.info(heuristicName + " will use " + GC_RATIO_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(gcRatioLimits));
+
+    double[] confRuntimeThreshold = Utils.getParam(paramMap.get(RUNTIME_SEVERITY), runtimeLimits.length);
+    if (confRuntimeThreshold != null) {
+      runtimeLimits = confRuntimeThreshold;
+    }
+    logger.info(heuristicName + " will use " + RUNTIME_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(runtimeLimits));
+    for (int i = 0; i < runtimeLimits.length; i++) {
+      runtimeLimits[i] = runtimeLimits[i] * Statistics.MINUTE_IN_MS;
+    }
+  }
+
+  protected GenericGCHeuristic(HeuristicConfigurationData heuristicConfData) {
+    this._heuristicConfData = heuristicConfData;
+
+    loadParameters();
+  }
+
+  protected abstract MapReduceTaskData[] getTasks(MapReduceApplicationData data);
+
+  @Override
+  public HeuristicConfigurationData getHeuristicConfData() {
+    return _heuristicConfData;
+  }
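+
+  // Worked example (illustrative): avgGcMs = 900 and avgCpuMs = 30000 give a GC/CPU ratio of
+  // 0.03, SEVERE under the default gc_ratio_severity thresholds {0.01, 0.02, 0.03, 0.04};
+  // the final severity may still be lowered by the runtime severity check below.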
+
+  @Override
+  public HeuristicResult apply(MapReduceApplicationData data) {
+
+    if (!data.getSucceeded()) {
+      return null;
+    }
+
+    MapReduceTaskData[] tasks = getTasks(data);
+    List<Long> gcMs = new ArrayList<Long>();
+    List<Long> cpuMs = new ArrayList<Long>();
+    List<Long> runtimesMs = new ArrayList<Long>();
+
+    for (MapReduceTaskData task : tasks) {
+      if (task.isTimeAndCounterDataPresent()) {
+        runtimesMs.add(task.getTotalRunTimeMs());
+        gcMs.add(task.getCounters().get(MapReduceCounterData.CounterName.GC_MILLISECONDS));
+        cpuMs.add(task.getCounters().get(MapReduceCounterData.CounterName.CPU_MILLISECONDS));
+      }
+    }
+
+    long avgRuntimeMs = Statistics.average(runtimesMs);
+    long avgCpuMs = Statistics.average(cpuMs);
+    long avgGcMs = Statistics.average(gcMs);
+    double ratio = (avgCpuMs != 0) ? ((double) avgGcMs) / avgCpuMs : 0;
+
+    Severity severity;
+    if (tasks.length == 0) {
+      severity = Severity.NONE;
+    } else {
+      severity = getGcRatioSeverity(avgRuntimeMs, avgCpuMs, avgGcMs);
+    }
+
+    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
+        _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
+
+    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
+    result.addResultDetail("Avg task runtime (ms)", Long.toString(avgRuntimeMs));
+    result.addResultDetail("Avg task CPU time (ms)", Long.toString(avgCpuMs));
+    result.addResultDetail("Avg task GC time (ms)", Long.toString(avgGcMs));
+    result.addResultDetail("Task GC/CPU ratio", Double.toString(ratio));
+    return result;
+  }
+
+  private Severity getGcRatioSeverity(long runtimeMs, long cpuMs, long gcMs) {
+    double gcRatio = ((double) gcMs) / cpuMs;
+    Severity ratioSeverity = Severity.getSeverityAscending(
+        gcRatio, gcRatioLimits[0], gcRatioLimits[1], gcRatioLimits[2], gcRatioLimits[3]);
+
+    // Severity is reduced if task runtime is insignificant
+    Severity runtimeSeverity = getRuntimeSeverity(runtimeMs);
+
+    return Severity.min(ratioSeverity, runtimeSeverity);
+  }
+
+  private Severity getRuntimeSeverity(long runtimeMs) {
+    return Severity.getSeverityAscending(
+        runtimeMs, runtimeLimits[0], runtimeLimits[1], runtimeLimits[2], runtimeLimits[3]);
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/GenericMemoryHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/GenericMemoryHeuristic.java
new file mode 100644
index 000000000..04644dc51
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/heuristics/GenericMemoryHeuristic.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import com.linkedin.drelephant.util.Utils;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+
+import java.util.Map;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+
+
+/**
+ * This heuristic deals with the efficiency of the container size
+ */
+public abstract class GenericMemoryHeuristic implements Heuristic<MapReduceApplicationData> {
+  private static final Logger logger = Logger.getLogger(GenericMemoryHeuristic.class);
+  private static final long CONTAINER_MEMORY_DEFAULT_MBYTES = 2048L;
+
+  // Severity Parameters
+  private static final String MEM_RATIO_SEVERITY = "memory_ratio_severity";
+  private static final String CONTAINER_MEM_SEVERITY = "container_memory_severity";
+  private static final String CONTAINER_MEM_DEFAULT_MB = "container_memory_default_mb";
+
+  // Default values of the parameters
+  private double[] memRatioLimits = {0.6d, 0.5d, 0.4d, 0.3d};  // Avg physical memory of tasks / container memory
+  private double[] memoryLimits = {1.1d, 1.5d, 2.0d, 2.5d};    // Container memory severity limits
+
+  private String _containerMemConf;
+  private HeuristicConfigurationData _heuristicConfData;
+
+  private long getContainerMemDefaultMBytes() {
+    Map<String, String> paramMap = _heuristicConfData.getParamMap();
+    if (paramMap.containsKey(CONTAINER_MEM_DEFAULT_MB)) {
+      String strValue = paramMap.get(CONTAINER_MEM_DEFAULT_MB);
+      try {
+        return Long.valueOf(strValue);
+      } catch (NumberFormatException e) {
+        logger.warn(CONTAINER_MEM_DEFAULT_MB + ": expected number [" + strValue + "]");
+      }
+    }
+    return CONTAINER_MEMORY_DEFAULT_MBYTES;
+  }
+
+  private void loadParameters() {
+    Map<String, String> paramMap = _heuristicConfData.getParamMap();
+    String heuristicName = _heuristicConfData.getHeuristicName();
+
+    double[] confMemRatioLimits = Utils.getParam(paramMap.get(MEM_RATIO_SEVERITY), memRatioLimits.length);
+    if (confMemRatioLimits != null) {
+      memRatioLimits = confMemRatioLimits;
+    }
+    logger.info(heuristicName + " will use " + MEM_RATIO_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(memRatioLimits));
+
+    long containerMemDefaultBytes = getContainerMemDefaultMBytes() * FileUtils.ONE_MB;
+    logger.info(heuristicName + " will use " + CONTAINER_MEM_DEFAULT_MB + " with the following threshold setting: "
+        + containerMemDefaultBytes);
+
+    double[] confMemoryLimits = Utils.getParam(paramMap.get(CONTAINER_MEM_SEVERITY), memoryLimits.length);
+    if (confMemoryLimits != null) {
+      memoryLimits = confMemoryLimits;
+    }
+    logger.info(heuristicName + " will use " + CONTAINER_MEM_SEVERITY + " with the following threshold settings: "
+        + Arrays.toString(memoryLimits));
+    for (int i = 0; i < memoryLimits.length; i++) {
+      memoryLimits[i] = memoryLimits[i] * containerMemDefaultBytes;
+    }
+  }
+
+  protected GenericMemoryHeuristic(String containerMemConf, HeuristicConfigurationData heuristicConfData) {
+    this._containerMemConf = containerMemConf;
+    this._heuristicConfData = heuristicConfData;
+
+    loadParameters();
+  }
+
+  protected abstract MapReduceTaskData[] getTasks(MapReduceApplicationData data);
+
+  @Override
+  public HeuristicConfigurationData getHeuristicConfData() {
+    return _heuristicConfData;
+  }
+
+  @Override
+  public HeuristicResult apply(MapReduceApplicationData data) {
+
+    if (!data.getSucceeded()) {
+      return null;
+    }
+
+    String containerSizeStr = data.getConf().getProperty(_containerMemConf);
+    long containerMem = -1L;
+
+    if (containerSizeStr != null) {
+      try {
+        containerMem = Long.parseLong(containerSizeStr);
+      } catch (NumberFormatException e0) {
+        // Some jobs use a string variable like "${VAR}" for this config; resolve it against the job conf.
+        if (containerSizeStr.startsWith("$")) {
+          String realContainerConf = containerSizeStr.substring(containerSizeStr.indexOf("{") + 1,
+              containerSizeStr.indexOf("}"));
+          String realContainerSizeStr = data.getConf().getProperty(realContainerConf);
+          try {
+            containerMem = Long.parseLong(realContainerSizeStr);
+          } catch (NumberFormatException e1) {
+            logger.warn(realContainerConf + ": expected number [" + realContainerSizeStr + "]");
+          }
+        } else {
+          logger.warn(_containerMemConf + ": expected number [" + containerSizeStr + "]");
+        }
+      }
+    }
+    if (containerMem < 0) {
+      containerMem = getContainerMemDefaultMBytes();
+    }
+    containerMem *= FileUtils.ONE_MB;
+
+    MapReduceTaskData[] tasks = getTasks(data);
+    List<Long> taskPMems = new ArrayList<Long>();
+    List<Long> taskVMems = new ArrayList<Long>();
+    List<Long> runtimesMs = new ArrayList<Long>();
+    long taskPMin = Long.MAX_VALUE;
+    long taskPMax = 0;
+    for (MapReduceTaskData task : tasks) {
+      if (task.isTimeAndCounterDataPresent()) {
+        runtimesMs.add(task.getTotalRunTimeMs());
+        long taskPMem = task.getCounters().get(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES);
+        long taskVMem = task.getCounters().get(MapReduceCounterData.CounterName.VIRTUAL_MEMORY_BYTES);
+        taskPMems.add(taskPMem);
+        taskPMin = Math.min(taskPMin, taskPMem);
+        taskPMax = Math.max(taskPMax, taskPMem);
+        taskVMems.add(taskVMem);
+      }
+    }
+
+    if (taskPMin == Long.MAX_VALUE) {
+      taskPMin = 0;
+    }
+
+    long taskPMemAvg = Statistics.average(taskPMems);
+    long taskVMemAvg = Statistics.average(taskVMems);
+    long averageTimeMs = Statistics.average(runtimesMs);
+
+    Severity severity;
+    if (tasks.length == 0) {
+      severity = Severity.NONE;
+    } else {
+      severity = getTaskMemoryUtilSeverity(taskPMemAvg, containerMem);
+    }
+
+    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
+        _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
+
+    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
+    result.addResultDetail("Avg task runtime", Statistics.readableTimespan(averageTimeMs));
+    result.addResultDetail("Avg Physical Memory (MB)", Long.toString(taskPMemAvg / FileUtils.ONE_MB));
+    result.addResultDetail("Max Physical Memory (MB)", Long.toString(taskPMax / FileUtils.ONE_MB));
+    result.addResultDetail("Min Physical Memory (MB)", Long.toString(taskPMin / FileUtils.ONE_MB));
+    result.addResultDetail("Avg Virtual Memory (MB)", Long.toString(taskVMemAvg / FileUtils.ONE_MB));
+    result.addResultDetail("Requested Container Memory", FileUtils.byteCountToDisplaySize(containerMem));
+
+    return result;
+  }
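+
+  // Worked example (illustrative): with the 2048 MB default and the default
+  // container_memory_severity thresholds {1.1, 1.5, 2.0, 2.5}, a requested container of
+  // 4096 MB (2.0x the default) has container-memory severity SEVERE; the final severity is
+  // the minimum of that and the memory-ratio severity, computed below.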
+  private Severity getTaskMemoryUtilSeverity(long taskMemAvg, long taskMemMax) {
+    double ratio = ((double) taskMemAvg) / taskMemMax;
+    Severity sevRatio = getMemoryRatioSeverity(ratio);
+    // Severity is reduced if the requested container memory is close to the default
+    Severity sevMax = getContainerMemorySeverity(taskMemMax);
+
+    return Severity.min(sevRatio, sevMax);
+  }
+
+  private Severity getContainerMemorySeverity(long taskMemMax) {
+    return Severity.getSeverityAscending(
+        taskMemMax, memoryLimits[0], memoryLimits[1], memoryLimits[2], memoryLimits[3]);
+  }
+
+  private Severity getMemoryRatioSeverity(double ratio) {
+    return Severity.getSeverityDescending(
+        ratio, memRatioLimits[0], memRatioLimits[1], memRatioLimits[2], memRatioLimits[3]);
+  }
+}
diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/JobQueueLimitHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/JobQueueLimitHeuristic.java
new file mode 100644
index 000000000..0ca8c0ce8
--- /dev/null
+++ b/app/com/linkedin/drelephant/mapreduce/heuristics/JobQueueLimitHeuristic.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+
+
+public class JobQueueLimitHeuristic implements Heuristic<MapReduceApplicationData> {
+
+  private HeuristicConfigurationData _heuristicConfData;
+
+  protected JobQueueLimitHeuristic(HeuristicConfigurationData heuristicConfData) {
+    this._heuristicConfData = heuristicConfData;
+  }
+
+  @Override
+  public HeuristicConfigurationData getHeuristicConfData() {
+    return _heuristicConfData;
+  }
+
+  @Override
+  public HeuristicResult apply(MapReduceApplicationData data) {
+
+    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
+        _heuristicConfData.getHeuristicName(), Severity.NONE, 0);
+    Properties jobConf = data.getConf();
+    long queueTimeoutLimitMs = TimeUnit.MINUTES.toMillis(15);
+
+    // Fetch the queue to which the job is submitted.
+    String queueName = jobConf.getProperty("mapred.job.queue.name");
+    if (queueName == null) {
+      throw new IllegalStateException("Queue Name not found.");
+    }
+
+    // Compute severity if the job is submitted to the default queue; otherwise set severity to NONE.
+    MapReduceTaskData[] mapTasks = data.getMapperData();
+    MapReduceTaskData[] redTasks = data.getReducerData();
+    if (queueName.equals("default")) {
+      result.addResultDetail("Queue", queueName, null);
+      result.addResultDetail("Number of Map tasks", Integer.toString(mapTasks.length));
+      result.addResultDetail("Number of Reduce tasks", Integer.toString(redTasks.length));
+
+      // Calculate the severity of mappers
+      Severity[] mapTasksSeverity = getTasksSeverity(mapTasks, queueTimeoutLimitMs);
+      result.addResultDetail("Number of Map tasks that are in severe state (14 to 14.5 min)",
+          Long.toString(getSeverityFrequency(Severity.SEVERE, mapTasksSeverity)));
+      result.addResultDetail("Number of Map tasks that are in critical state (over 14.5 min)",
+          Long.toString(getSeverityFrequency(Severity.CRITICAL, mapTasksSeverity)));
+
+      // Calculate the severity of reducers
+      Severity[] redTasksSeverity = getTasksSeverity(redTasks, queueTimeoutLimitMs);
+      result.addResultDetail("Number of Reduce tasks that are in severe state (14 to 14.5 min)",
+          Long.toString(getSeverityFrequency(Severity.SEVERE, redTasksSeverity)));
+      result.addResultDetail("Number of Reduce tasks that are in critical state (over 14.5 min)",
+          Long.toString(getSeverityFrequency(Severity.CRITICAL, redTasksSeverity)));
+
+      // Calculate the job severity
+      result.setSeverity(Severity.max(Severity.max(mapTasksSeverity), Severity.max(redTasksSeverity)));
+
+    } else {
+      result.addResultDetail("Not Applicable", "This Heuristic is not applicable to the " + queueName + " queue");
+      result.setSeverity(Severity.NONE);
+    }
+    return result;
+  }
+
+  private Severity[] getTasksSeverity(MapReduceTaskData[] tasks, long queueTimeout) {
+    Severity[] tasksSeverity = new Severity[tasks.length];
+    int i = 0;
+    for (MapReduceTaskData task : tasks) {
+      tasksSeverity[i] = getQueueLimitSeverity(task.getTotalRunTimeMs(), queueTimeout);
+      i++;
+    }
+    return tasksSeverity;
+  }
+
+  private long getSeverityFrequency(Severity severity, Severity[] tasksSeverity) {
+    long count = 0;
+    for (Severity taskSeverity : tasksSeverity) {
+      if (taskSeverity.equals(severity)) {
+        count++;
+      }
+    }
+    return count;
+  }
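+
+  // With the default queue's 15-minute limit, task runtimes are graded against 13, 13.5,
+  // 14 and 14.5 minutes (LOW through CRITICAL), i.e. queueTimeout minus 4, 3, 2 and 1
+  // thirty-second units.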
See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + + +/** + * This Heuristic analyses the skewness in the mapper input data + */ +public class MapperDataSkewHeuristic extends GenericDataSkewHeuristic { + + public MapperDataSkewHeuristic(HeuristicConfigurationData heuristicConfData) { + super(MapReduceCounterData.CounterName.HDFS_BYTES_READ, heuristicConfData); + } + + @Override + protected MapReduceTaskData[] getTasks(MapReduceApplicationData data) { + return data.getMapperData(); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/MapperGCHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperGCHeuristic.java new file mode 100644 index 000000000..e57320128 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperGCHeuristic.java @@ -0,0 +1,34 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + + +public class MapperGCHeuristic extends GenericGCHeuristic { + + public MapperGCHeuristic(HeuristicConfigurationData heuristicConfData) { + super(heuristicConfData); + } + + @Override + protected MapReduceTaskData[] getTasks(MapReduceApplicationData data) { + return data.getMapperData(); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/MapperMemoryHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperMemoryHeuristic.java new file mode 100644 index 000000000..b973b7450 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperMemoryHeuristic.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + + +public class MapperMemoryHeuristic extends GenericMemoryHeuristic { + public static final String MAPPER_MEMORY_CONF = "mapreduce.map.memory.mb"; + + public MapperMemoryHeuristic(HeuristicConfigurationData _heuristicConfData) { + super(MAPPER_MEMORY_CONF, _heuristicConfData); + } + + @Override + protected MapReduceTaskData[] getTasks(MapReduceApplicationData data) { + return data.getMapperData(); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/MapperSpeedHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperSpeedHeuristic.java new file mode 100644 index 000000000..a80eb551d --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperSpeedHeuristic.java @@ -0,0 +1,152 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import com.linkedin.drelephant.util.Utils; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.linkedin.drelephant.analysis.HDFSContext; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.math.Statistics; + +import java.util.Map; +import org.apache.commons.io.FileUtils; +import org.apache.log4j.Logger; + + +public class MapperSpeedHeuristic implements Heuristic { + private static final Logger logger = Logger.getLogger(MapperSpeedHeuristic.class); + + // Severity parameters. 
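+ // Both thresholds below can be overridden through the heuristic's param map; the defaults
+ // apply when no override is configured (see loadParameters()).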
+ private static final String DISK_SPEED_SEVERITY = "disk_speed_severity"; + private static final String RUNTIME_SEVERITY = "runtime_severity_in_min"; + + // Default value of parameters + private double[] diskSpeedLimits = {1d/2, 1d/4, 1d/8, 1d/32}; // Fraction of HDFS block size + private double[] runtimeLimits = {5, 10, 15, 30}; // The Map task runtime in milli sec + + private HeuristicConfigurationData _heuristicConfData; + + private void loadParameters() { + Map paramMap = _heuristicConfData.getParamMap(); + String heuristicName = _heuristicConfData.getHeuristicName(); + + double[] confDiskSpeedThreshold = Utils.getParam(paramMap.get(DISK_SPEED_SEVERITY), diskSpeedLimits.length); + if (confDiskSpeedThreshold != null) { + diskSpeedLimits = confDiskSpeedThreshold; + } + logger.info(heuristicName + " will use " + DISK_SPEED_SEVERITY + " with the following threshold settings: " + + Arrays.toString(diskSpeedLimits)); + for (int i = 0; i < diskSpeedLimits.length; i++) { + diskSpeedLimits[i] = diskSpeedLimits[i] * HDFSContext.DISK_READ_SPEED; + } + + double[] confRuntimeThreshold = Utils.getParam(paramMap.get(RUNTIME_SEVERITY), runtimeLimits.length); + if (confRuntimeThreshold != null) { + runtimeLimits = confRuntimeThreshold; + } + logger.info(heuristicName + " will use " + RUNTIME_SEVERITY + " with the following threshold settings: " + Arrays + .toString(runtimeLimits)); + for (int i = 0; i < runtimeLimits.length; i++) { + runtimeLimits[i] = runtimeLimits[i] * Statistics.MINUTE_IN_MS; + } + } + + public MapperSpeedHeuristic(HeuristicConfigurationData heuristicConfData) { + this._heuristicConfData = heuristicConfData; + loadParameters(); + } + + @Override + public HeuristicConfigurationData getHeuristicConfData() { + return _heuristicConfData; + } + + @Override + public HeuristicResult apply(MapReduceApplicationData data) { + + if(!data.getSucceeded()) { + return null; + } + + MapReduceTaskData[] tasks = data.getMapperData(); + + List inputByteSizes = new ArrayList(); + List speeds = new ArrayList(); + List runtimesMs = new ArrayList(); + + for (MapReduceTaskData task : tasks) { + + if (task.isTimeAndCounterDataPresent()) { + long inputBytes = task.getCounters().get(MapReduceCounterData.CounterName.HDFS_BYTES_READ); + long runtimeMs = task.getTotalRunTimeMs(); + inputByteSizes.add(inputBytes); + runtimesMs.add(runtimeMs); + //Speed is bytes per second + speeds.add((1000 * inputBytes) / (runtimeMs)); + } + } + + long medianSpeed; + long medianSize; + long medianRuntimeMs; + + if (tasks.length != 0) { + medianSpeed = Statistics.median(speeds); + medianSize = Statistics.median(inputByteSizes); + medianRuntimeMs = Statistics.median(runtimesMs); + } else { + medianSpeed = 0; + medianSize = 0; + medianRuntimeMs = 0; + } + + Severity severity = getDiskSpeedSeverity(medianSpeed); + + //This reduces severity if task runtime is insignificant + severity = Severity.min(severity, getRuntimeSeverity(medianRuntimeMs)); + + HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), + _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length)); + + result.addResultDetail("Number of tasks", Integer.toString(tasks.length)); + result.addResultDetail("Median task input size", FileUtils.byteCountToDisplaySize(medianSize)); + result.addResultDetail("Median task runtime", Statistics.readableTimespan(medianRuntimeMs)); + result.addResultDetail("Median task speed", FileUtils.byteCountToDisplaySize(medianSpeed) + "/s"); + + return result; + } + + private 
Severity getDiskSpeedSeverity(long speed) { + return Severity.getSeverityDescending( + speed, diskSpeedLimits[0], diskSpeedLimits[1], diskSpeedLimits[2], diskSpeedLimits[3]); + } + + private Severity getRuntimeSeverity(long runtimeMs) { + return Severity.getSeverityAscending( + runtimeMs, runtimeLimits[0], runtimeLimits[1], runtimeLimits[2], runtimeLimits[3]); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/MapperSpillHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperSpillHeuristic.java new file mode 100644 index 000000000..c7caa6582 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperSpillHeuristic.java @@ -0,0 +1,140 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import com.linkedin.drelephant.util.Utils; +import java.util.Arrays; +import java.util.Map; +import org.apache.log4j.Logger; + + +public class MapperSpillHeuristic implements Heuristic { + private static final Logger logger = Logger.getLogger(MapperSpillHeuristic.class); + private static final long THRESHOLD_SPILL_FACTOR = 10000; + + // Severity parameters. + private static final String SPILL_SEVERITY = "spill_severity"; + private static final String NUM_TASKS_SEVERITY = "num_tasks_severity"; + + // Default value of parameters + private double[] numTasksLimits = {50, 100, 500, 1000}; // Number of Map tasks. 
+ private double[] spillLimits = {2.01d, 2.2d, 2.5d, 3.0d}; // Records spilled/total output records + + private HeuristicConfigurationData _heuristicConfData; + + private void loadParameters() { + Map paramMap = _heuristicConfData.getParamMap(); + String heuristicName = _heuristicConfData.getHeuristicName(); + + double[] confNumTasksThreshold = Utils.getParam(paramMap.get(NUM_TASKS_SEVERITY), numTasksLimits.length); + if (confNumTasksThreshold != null) { + numTasksLimits = confNumTasksThreshold; + } + logger.info(heuristicName + " will use " + NUM_TASKS_SEVERITY + " with the following threshold settings: " + + Arrays.toString(numTasksLimits)); + + double[] confSpillThreshold = Utils.getParam(paramMap.get(SPILL_SEVERITY), spillLimits.length); + if (confSpillThreshold != null) { + spillLimits = confSpillThreshold; + } + logger.info(heuristicName + " will use " + SPILL_SEVERITY + " with the following threshold settings: " + Arrays + .toString(spillLimits)); + for (int i = 0; i < spillLimits.length; i++) { + spillLimits[i] = spillLimits[i] * THRESHOLD_SPILL_FACTOR; + } + } + + public MapperSpillHeuristic(HeuristicConfigurationData heuristicConfData) { + this._heuristicConfData = heuristicConfData; + loadParameters(); + } + + @Override + public HeuristicConfigurationData getHeuristicConfData() { + return _heuristicConfData; + } + + @Override + public HeuristicResult apply(MapReduceApplicationData data) { + + if(!data.getSucceeded()) { + return null; + } + + MapReduceTaskData[] tasks = data.getMapperData(); + + long totalSpills = 0; + long totalOutputRecords = 0; + double ratioSpills = 0.0; + + for (MapReduceTaskData task : tasks) { + + if (task.isTimeAndCounterDataPresent()) { + totalSpills += task.getCounters().get(MapReduceCounterData.CounterName.SPILLED_RECORDS); + totalOutputRecords += task.getCounters().get(MapReduceCounterData.CounterName.MAP_OUTPUT_RECORDS); + } + } + + //If both totalSpills and totalOutputRecords are zero then set ratioSpills to zero. + if (totalSpills == 0) { + ratioSpills = 0; + } else { + ratioSpills = (double) totalSpills / (double) totalOutputRecords; + } + + Severity severity = getSpillSeverity(ratioSpills); + + // Severity is reduced if number of tasks is small + Severity taskSeverity = getNumTasksSeverity(tasks.length); + severity = Severity.min(severity, taskSeverity); + + HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), + _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length)); + + result.addResultDetail("Number of tasks", Integer.toString(tasks.length)); + result.addResultDetail("Avg spilled records per task", + tasks.length == 0 ? "0" : Long.toString(totalSpills / tasks.length)); + result.addResultDetail("Avg output records per task", + tasks.length == 0 ? "0" : Long.toString(totalOutputRecords / tasks.length)); + result.addResultDetail("Ratio of spilled records to output records", Double.toString(ratioSpills)); + + return result; + + } + + private Severity getSpillSeverity(double ratioSpills) { + + long normalizedSpillRatio = 0; + //Normalize the ratio to integer. 
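+ // e.g. a spill ratio of 2.2 is normalized to 22000; since spillLimits were scaled by the
+ // same THRESHOLD_SPILL_FACTOR in loadParameters(), this compares the ratio against the
+ // configured thresholds (defaults {2.01, 2.2, 2.5, 3.0}) using integer arithmetic.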
+ normalizedSpillRatio = (long) (ratioSpills * THRESHOLD_SPILL_FACTOR); + + return Severity.getSeverityAscending( + normalizedSpillRatio, spillLimits[0], spillLimits[1], spillLimits[2], spillLimits[3]); + } + + private Severity getNumTasksSeverity(long numTasks) { + return Severity.getSeverityAscending( + numTasks, numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/MapperTimeHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperTimeHeuristic.java new file mode 100644 index 000000000..f47401c29 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/MapperTimeHeuristic.java @@ -0,0 +1,170 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import com.linkedin.drelephant.util.Utils; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.math.Statistics; + +import java.util.Map; +import org.apache.commons.io.FileUtils; +import org.apache.log4j.Logger; + + +public class MapperTimeHeuristic implements Heuristic { + private static final Logger logger = Logger.getLogger(MapperTimeHeuristic.class); + + // Severity parameters. + private static final String SHORT_RUNTIME_SEVERITY = "short_runtime_severity_in_min"; + private static final String LONG_RUNTIME_SEVERITY = "long_runtime_severity_in_min"; + private static final String NUM_TASKS_SEVERITY = "num_tasks_severity"; + + // Default value of parameters + private double[] shortRuntimeLimits = {10, 4, 2, 1}; // Limits(ms) for tasks with shorter runtime + private double[] longRuntimeLimits = {15, 30, 60, 120}; // Limits(ms) for tasks with longer runtime + private double[] numTasksLimits = {50, 101, 500, 1000}; // Number of Map tasks. 
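+ // Worked example with the defaults above (converted to ms in loadParameters()): an average
+ // map runtime of 3 min is under the 4 min short-runtime threshold (MODERATE), but a job with
+ // only 40 mappers falls below numTasksLimits[0] = 50, so the short-task severity is capped to NONE.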
+ + private HeuristicConfigurationData _heuristicConfData; + + private void loadParameters() { + Map paramMap = _heuristicConfData.getParamMap(); + String heuristicName = _heuristicConfData.getHeuristicName(); + + double[] confShortThreshold = Utils.getParam(paramMap.get(SHORT_RUNTIME_SEVERITY), shortRuntimeLimits.length); + if (confShortThreshold != null) { + shortRuntimeLimits = confShortThreshold; + } + logger.info(heuristicName + " will use " + SHORT_RUNTIME_SEVERITY + " with the following threshold settings: " + + Arrays.toString(shortRuntimeLimits)); + for (int i = 0; i < shortRuntimeLimits.length; i++) { + shortRuntimeLimits[i] = shortRuntimeLimits[i] * Statistics.MINUTE_IN_MS; + } + + double[] confLongThreshold = Utils.getParam(paramMap.get(LONG_RUNTIME_SEVERITY), longRuntimeLimits.length); + if (confLongThreshold != null) { + longRuntimeLimits = confLongThreshold; + } + logger.info(heuristicName + " will use " + LONG_RUNTIME_SEVERITY + " with the following threshold settings: " + + Arrays.toString(longRuntimeLimits)); + for (int i = 0; i < longRuntimeLimits.length; i++) { + longRuntimeLimits[i] = longRuntimeLimits[i] * Statistics.MINUTE_IN_MS; + } + + double[] confNumTasksThreshold = Utils.getParam(paramMap.get(NUM_TASKS_SEVERITY), numTasksLimits.length); + if (confNumTasksThreshold != null) { + numTasksLimits = confNumTasksThreshold; + } + logger.info(heuristicName + " will use " + NUM_TASKS_SEVERITY + " with the following threshold settings: " + Arrays + .toString(numTasksLimits)); + } + + public MapperTimeHeuristic(HeuristicConfigurationData heuristicConfData) { + this._heuristicConfData = heuristicConfData; + loadParameters(); + } + + @Override + public HeuristicConfigurationData getHeuristicConfData() { + return _heuristicConfData; + } + + @Override + public HeuristicResult apply(MapReduceApplicationData data) { + + if(!data.getSucceeded()) { + return null; + } + + MapReduceTaskData[] tasks = data.getMapperData(); + + List inputBytes = new ArrayList(); + List runtimesMs = new ArrayList(); + long taskMinMs = Long.MAX_VALUE; + long taskMaxMs = 0; + + for (MapReduceTaskData task : tasks) { + + if (task.isTimeAndCounterDataPresent()) { + inputBytes.add(task.getCounters().get(MapReduceCounterData.CounterName.HDFS_BYTES_READ)); + long taskTime = task.getTotalRunTimeMs(); + runtimesMs.add(taskTime); + taskMinMs = Math.min(taskMinMs, taskTime); + taskMaxMs = Math.max(taskMaxMs, taskTime); + } + } + + if(taskMinMs == Long.MAX_VALUE) { + taskMinMs = 0; + } + + long averageSize = Statistics.average(inputBytes); + long averageTimeMs = Statistics.average(runtimesMs); + + Severity shortTaskSeverity = shortTaskSeverity(tasks.length, averageTimeMs); + Severity longTaskSeverity = longTaskSeverity(tasks.length, averageTimeMs); + Severity severity = Severity.max(shortTaskSeverity, longTaskSeverity); + + HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), + _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length)); + + result.addResultDetail("Number of tasks", Integer.toString(tasks.length)); + result.addResultDetail("Average task input size", FileUtils.byteCountToDisplaySize(averageSize)); + result.addResultDetail("Average task runtime", Statistics.readableTimespan(averageTimeMs)); + result.addResultDetail("Max task runtime", Statistics.readableTimespan(taskMaxMs)); + result.addResultDetail("Min task runtime", Statistics.readableTimespan(taskMinMs)); + + return result; + } + + private Severity shortTaskSeverity(long numTasks, 
long averageTimeMs) { + // We want to identify jobs with short task runtime + Severity severity = getShortRuntimeSeverity(averageTimeMs); + // Severity is reduced if number of tasks is small. + Severity numTaskSeverity = getNumTasksSeverity(numTasks); + return Severity.min(severity, numTaskSeverity); + } + + private Severity longTaskSeverity(long numTasks, long averageTimeMs) { + // We want to identify jobs with long task runtime. Severity is NOT reduced if num of tasks is large + return getLongRuntimeSeverity(averageTimeMs); + } + + private Severity getShortRuntimeSeverity(long runtimeMs) { + return Severity.getSeverityDescending( + runtimeMs, shortRuntimeLimits[0], shortRuntimeLimits[1], shortRuntimeLimits[2], shortRuntimeLimits[3]); + } + + private Severity getLongRuntimeSeverity(long runtimeMs) { + return Severity.getSeverityAscending( + runtimeMs, longRuntimeLimits[0], longRuntimeLimits[1], longRuntimeLimits[2], longRuntimeLimits[3]); + } + + private Severity getNumTasksSeverity(long numTasks) { + return Severity.getSeverityAscending( + numTasks, numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerDataSkewHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerDataSkewHeuristic.java new file mode 100644 index 000000000..19d21bd70 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerDataSkewHeuristic.java @@ -0,0 +1,38 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + + +/** + * This Heuristic analyses the skewness in the reducer input data + */ +public class ReducerDataSkewHeuristic extends GenericDataSkewHeuristic { + + public ReducerDataSkewHeuristic(HeuristicConfigurationData heuristicConfData) { + super(MapReduceCounterData.CounterName.REDUCE_SHUFFLE_BYTES, heuristicConfData); + } + + @Override + protected MapReduceTaskData[] getTasks(MapReduceApplicationData data) { + return data.getReducerData(); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerGCHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerGCHeuristic.java new file mode 100644 index 000000000..a4891760c --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerGCHeuristic.java @@ -0,0 +1,34 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + + +public class ReducerGCHeuristic extends GenericGCHeuristic { + + public ReducerGCHeuristic(HeuristicConfigurationData _heuristicConfData) { + super(_heuristicConfData); + } + + @Override + protected MapReduceTaskData[] getTasks(MapReduceApplicationData data) { + return data.getReducerData(); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerMemoryHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerMemoryHeuristic.java new file mode 100644 index 000000000..1976e4637 --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerMemoryHeuristic.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; + + +public class ReducerMemoryHeuristic extends GenericMemoryHeuristic { + public static final String REDUCER_MEMORY_CONF = "mapreduce.reduce.memory.mb"; + + public ReducerMemoryHeuristic(HeuristicConfigurationData _heuristicConfData) { + super(REDUCER_MEMORY_CONF, _heuristicConfData); + } + + @Override + protected MapReduceTaskData[] getTasks(MapReduceApplicationData data) { + return data.getReducerData(); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerTimeHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerTimeHeuristic.java new file mode 100644 index 000000000..a71604b5d --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/ReducerTimeHeuristic.java @@ -0,0 +1,162 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import com.linkedin.drelephant.util.Utils; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.math.Statistics; +import java.util.Map; +import org.apache.log4j.Logger; + + +public class ReducerTimeHeuristic implements Heuristic { + private static final Logger logger = Logger.getLogger(ReducerTimeHeuristic.class); + + // Severity parameters. + private static final String SHORT_RUNTIME_SEVERITY = "short_runtime_severity_in_min"; + private static final String LONG_RUNTIME_SEVERITY = "long_runtime_severity_in_min"; + private static final String NUM_TASKS_SEVERITY = "num_tasks_severity"; + + // Default value of parameters + private double[] shortRuntimeLimits = {10, 4, 2, 1}; // Limits(ms) for tasks with shorter runtime + private double[] longRuntimeLimits = {15, 30, 60, 120}; // Limits(ms) for tasks with longer runtime + private double[] numTasksLimits = {50, 101, 500, 1000}; // Number of Reduce tasks. + + private HeuristicConfigurationData _heuristicConfData; + + private void loadParameters() { + Map paramMap = _heuristicConfData.getParamMap(); + String heuristicName = _heuristicConfData.getHeuristicName(); + + double[] confShortRuntimeLimits = Utils.getParam(paramMap.get(SHORT_RUNTIME_SEVERITY), shortRuntimeLimits.length); + if (confShortRuntimeLimits != null) { + shortRuntimeLimits = confShortRuntimeLimits; + } + logger.info(heuristicName + " will use " + SHORT_RUNTIME_SEVERITY + " with the following threshold settings: " + + Arrays.toString(shortRuntimeLimits)); + for (int i = 0; i < shortRuntimeLimits.length; i++) { + shortRuntimeLimits[i] = shortRuntimeLimits[i] * Statistics.MINUTE_IN_MS; + } + + double[] confLongRuntimeLimitss = Utils.getParam(paramMap.get(LONG_RUNTIME_SEVERITY), longRuntimeLimits.length); + if (confLongRuntimeLimitss != null) { + longRuntimeLimits = confLongRuntimeLimitss; + } + logger.info(heuristicName + " will use " + LONG_RUNTIME_SEVERITY + " with the following threshold settings: " + + Arrays.toString(longRuntimeLimits)); + for (int i = 0; i < longRuntimeLimits.length; i++) { + longRuntimeLimits[i] = longRuntimeLimits[i] * Statistics.MINUTE_IN_MS; + } + + double[] confNumTasksLimits = Utils.getParam(paramMap.get(NUM_TASKS_SEVERITY), numTasksLimits.length); + if (confNumTasksLimits != null) { + numTasksLimits = confNumTasksLimits; + } + logger.info(heuristicName + " will use " + NUM_TASKS_SEVERITY + " with the following threshold settings: " + Arrays + .toString(numTasksLimits)); + + } + + public ReducerTimeHeuristic(HeuristicConfigurationData heuristicConfData) { + this._heuristicConfData = heuristicConfData; + loadParameters(); + } + + @Override + public HeuristicConfigurationData getHeuristicConfData() { + return _heuristicConfData; + } + + @Override + public HeuristicResult apply(MapReduceApplicationData data) { + + if(!data.getSucceeded()) { + return null; + } + + MapReduceTaskData[] tasks = data.getReducerData(); + + List runTimesMs = new ArrayList(); + 
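// Track the fastest and slowest reducer for the result details below; tasks without
+ // time and counter data are skipped (taskMinMs is reset to 0 if none qualified).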
long taskMinMs = Long.MAX_VALUE; + long taskMaxMs = 0; + + for (MapReduceTaskData task : tasks) { + if (task.isTimeAndCounterDataPresent()) { + long taskTime = task.getTotalRunTimeMs(); + runTimesMs.add(taskTime); + taskMinMs = Math.min(taskMinMs, taskTime); + taskMaxMs = Math.max(taskMaxMs, taskTime); + } + } + + if(taskMinMs == Long.MAX_VALUE) { + taskMinMs = 0; + } + + //Analyze data + long averageRuntimeMs = Statistics.average(runTimesMs); + + Severity shortTimeSeverity = shortTimeSeverity(averageRuntimeMs, tasks.length); + Severity longTimeSeverity = longTimeSeverity(averageRuntimeMs, tasks.length); + Severity severity = Severity.max(shortTimeSeverity, longTimeSeverity); + + HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), + _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length)); + + result.addResultDetail("Number of tasks", Integer.toString(tasks.length)); + result.addResultDetail("Average task runtime", Statistics.readableTimespan(averageRuntimeMs)); + result.addResultDetail("Max task runtime", Statistics.readableTimespan(taskMaxMs)); + result.addResultDetail("Min task runtime", Statistics.readableTimespan(taskMinMs)); + return result; + } + + private Severity shortTimeSeverity(long runtimeMs, long numTasks) { + Severity timeSeverity = getShortRuntimeSeverity(runtimeMs); + // Severity is adjusted based on number of tasks + Severity taskSeverity = getNumTasksSeverity(numTasks); + return Severity.min(timeSeverity, taskSeverity); + } + + private Severity longTimeSeverity(long runtimeMs, long numTasks) { + // Severity is NOT adjusted based on number of tasks + return getLongRuntimeSeverity(runtimeMs); + } + + private Severity getShortRuntimeSeverity(long runtimeMs) { + return Severity.getSeverityDescending( + runtimeMs, shortRuntimeLimits[0], shortRuntimeLimits[1], shortRuntimeLimits[2], shortRuntimeLimits[3]); + } + + private Severity getLongRuntimeSeverity(long runtimeMs) { + return Severity.getSeverityAscending( + runtimeMs, longRuntimeLimits[0], longRuntimeLimits[1], longRuntimeLimits[2], longRuntimeLimits[3]); + } + + private Severity getNumTasksSeverity(long numTasks) { + return Severity.getSeverityAscending( + numTasks, numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]); + } +} diff --git a/app/com/linkedin/drelephant/mapreduce/heuristics/ShuffleSortHeuristic.java b/app/com/linkedin/drelephant/mapreduce/heuristics/ShuffleSortHeuristic.java new file mode 100644 index 000000000..7d047313f --- /dev/null +++ b/app/com/linkedin/drelephant/mapreduce/heuristics/ShuffleSortHeuristic.java @@ -0,0 +1,140 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import com.linkedin.drelephant.util.Utils; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.math.Statistics; +import java.util.Map; +import org.apache.log4j.Logger; + + +/** + * Analyses the efficiency of Shuffle and Sort + */ +public class ShuffleSortHeuristic implements Heuristic { + private static final Logger logger = Logger.getLogger(ShuffleSortHeuristic.class); + + // Severity parameters. + private static final String RUNTIME_RATIO_SEVERITY = "runtime_ratio_severity"; + private static final String RUNTIME_SEVERITY = "runtime_severity_in_min"; + + // Default value of parameters + private double[] runtimeRatioLimits = {1, 2, 4, 8}; // Avg Shuffle or Sort Time * 2 / Avg Exec Time + private double[] runtimeLimits = {1, 5, 10, 30}; // Shuffle/Sort Runtime in milli sec + + private HeuristicConfigurationData _heuristicConfData; + + private void loadParameters() { + Map paramMap = _heuristicConfData.getParamMap(); + String heuristicName = _heuristicConfData.getHeuristicName(); + + double[] confRatioLimitsd = Utils.getParam(paramMap.get(RUNTIME_RATIO_SEVERITY), runtimeRatioLimits.length); + if (confRatioLimitsd != null) { + runtimeRatioLimits = confRatioLimitsd; + } + logger.info(heuristicName + " will use " + RUNTIME_RATIO_SEVERITY + " with the following threshold settings: " + + Arrays.toString(runtimeRatioLimits)); + + double[] confRuntimeLimits = Utils.getParam(paramMap.get(RUNTIME_SEVERITY), runtimeLimits.length); + if (confRuntimeLimits != null) { + runtimeLimits = confRuntimeLimits; + } + logger.info(heuristicName + " will use " + RUNTIME_SEVERITY + " with the following threshold settings: " + Arrays + .toString(runtimeLimits)); + for (int i = 0; i < runtimeLimits.length; i++) { + runtimeLimits[i] = runtimeLimits[i] * Statistics.MINUTE_IN_MS; + } + } + + public ShuffleSortHeuristic(HeuristicConfigurationData heuristicConfData) { + this._heuristicConfData = heuristicConfData; + loadParameters(); + } + + @Override + public HeuristicConfigurationData getHeuristicConfData() { + return _heuristicConfData; + } + + @Override + public HeuristicResult apply(MapReduceApplicationData data) { + + if(!data.getSucceeded()) { + return null; + } + + MapReduceTaskData[] tasks = data.getReducerData(); + + List execTimeMs = new ArrayList(); + List shuffleTimeMs = new ArrayList(); + List sortTimeMs = new ArrayList(); + + for (MapReduceTaskData task : tasks) { + if (task.isTimeAndCounterDataPresent()) { + execTimeMs.add(task.getCodeExecutionTimeMs()); + shuffleTimeMs.add(task.getShuffleTimeMs()); + sortTimeMs.add(task.getSortTimeMs()); + } + } + + //Analyze data + long avgExecTimeMs = Statistics.average(execTimeMs); + long avgShuffleTimeMs = Statistics.average(shuffleTimeMs); + long avgSortTimeMs = Statistics.average(sortTimeMs); + + Severity shuffleSeverity = getShuffleSortSeverity(avgShuffleTimeMs, avgExecTimeMs); + Severity sortSeverity = getShuffleSortSeverity(avgSortTimeMs, avgExecTimeMs); + Severity severity = Severity.max(shuffleSeverity, sortSeverity); + + HeuristicResult result = new 
HeuristicResult(_heuristicConfData.getClassName(), + _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length)); + + result.addResultDetail("Number of tasks", Integer.toString(data.getReducerData().length)); + result.addResultDetail("Average code runtime", Statistics.readableTimespan(avgExecTimeMs)); + String shuffleFactor = Statistics.describeFactor(avgShuffleTimeMs, avgExecTimeMs, "x"); + result.addResultDetail("Average shuffle time", Statistics.readableTimespan(avgShuffleTimeMs) + " " + shuffleFactor); + String sortFactor = Statistics.describeFactor(avgSortTimeMs, avgExecTimeMs, "x"); + result.addResultDetail("Average sort time", Statistics.readableTimespan(avgSortTimeMs) + " " + sortFactor); + + return result; + } + + private Severity getShuffleSortSeverity(long runtimeMs, long codetimeMs) { + Severity runtimeSeverity = Severity.getSeverityAscending( + runtimeMs, runtimeLimits[0], runtimeLimits[1], runtimeLimits[2], runtimeLimits[3]); + + if (codetimeMs <= 0) { + return runtimeSeverity; + } + long value = runtimeMs * 2 / codetimeMs; + + Severity runtimeRatioSeverity = Severity.getSeverityAscending( + value, runtimeRatioLimits[0], runtimeRatioLimits[1], runtimeRatioLimits[2], runtimeRatioLimits[3]); + + return Severity.min(runtimeSeverity, runtimeRatioSeverity); + } +} diff --git a/app/com/linkedin/drelephant/math/Statistics.java b/app/com/linkedin/drelephant/math/Statistics.java index 1a7cdad2c..de608278f 100644 --- a/app/com/linkedin/drelephant/math/Statistics.java +++ b/app/com/linkedin/drelephant/math/Statistics.java @@ -1,6 +1,20 @@ -package com.linkedin.drelephant.math; +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ -import com.linkedin.drelephant.analysis.Severity; +package com.linkedin.drelephant.math; import java.lang.reflect.Array; import java.util.ArrayList; @@ -8,173 +22,268 @@ import java.util.Collections; import java.util.List; import java.util.Random; +import org.apache.commons.io.FileUtils; + + +/** + * This class includes all the statistical operations + */ +public final class Statistics { + + public static final long SECOND_IN_MS = 1000L; + public static final long MINUTE_IN_MS = 60L * SECOND_IN_MS; + public static final long HOUR_IN_MS = 60L * MINUTE_IN_MS; + + public static long MINUTE = 60L; + public static long HOUR = 60*MINUTE; + + private Statistics() { + } + + /** + * Check if the array has deviating elements. + *

+ * Deviating elements are found by comparing each individual value against the average.
+ *
+ * @param values the array of values to check
+ * @param buffer the amount to ignore as a buffer for smaller valued lists
+ * @param factor the amount of allowed deviation is calculated from average * factor
+ * @return the indices of the deviating values, or an empty array if there are none
+ */
+ public static int[] deviates(long[] values, long buffer, double factor) {
+   if (values == null || values.length == 0) {
+     return new int[0];
+   }
+
+   long avg = average(values);
+
+   //Find deviated elements
+
+   long minimumDiff = Math.max(buffer, (long) (avg * factor));
+   List<Integer> deviatedElements = new ArrayList<Integer>();
+
+   for (int i = 0; i < values.length; i++) {
+     long diff = values[i] - avg;
+     if (diff > minimumDiff) {
+       deviatedElements.add(i);
+     }
+   }
+
+   int[] result = new int[deviatedElements.size()];
+   for (int i = 0; i < result.length; i++) {
+     result[i] = deviatedElements.get(i);
+   }
+
+   return result;
+ }
+
+ /**
+  * The percentile method returns the least value from the given list which has at least the given percentile.
+  * @param values The list of values to find the percentile from
+  * @param percentile The percentile
+  * @return The least value from the list with at least the given percentile
+  */
+ public static long percentile(List<Long> values, int percentile) {
+
+   if (values.size() == 0) {
+     throw new IllegalArgumentException("Percentile of empty list is not defined.");
+   }
+
+   if (percentile > 100 || percentile < 0) {
+     throw new IllegalArgumentException("Percentile has to be between 0-100");
+   }
+
+   if (percentile == 0) {
+     return 0;
+   }
+
+   Collections.sort(values);
+
+   // Use Nearest Rank method.
+   // https://en.wikipedia.org/wiki/Percentile#The_Nearest_Rank_method
+   int position = (int) Math.ceil(values.size() * percentile / 100.0);
+
+   // should never happen.
+   if (position == 0) {
+     return values.get(position);
+   }
+
+   // position is always one greater than index. Return value at the proper index
+   return values.get(position - 1);
+ }
-public class Statistics {
-
-  public static final long SECOND = 1000L;
-  public static final long MINUTE = 60L * SECOND;
-  public static final long HOUR = 60L * MINUTE;
-
-  /**
-   * Check if the array has deviating elements.
-   *

- * Deviating elements are found by comparing each individual value against the average. - * - * @param values the array of values to check - * @param buffer the amount to ignore as a buffer for smaller valued lists - * @param factor the amount of allowed deviation is calculated from average * factor - * @return the index of the deviating value, or -1 if - */ - public static int[] deviates(long[] values, long buffer, double factor) { - if (values == null || values.length == 0) { - return new int[0]; - } - - long avg = average(values); - - //Find deviated elements - - long minimumDiff = Math.max(buffer, (long) (avg * factor)); - List deviatedElements = new ArrayList(); - - for (int i = 0; i < values.length; i++) { - long diff = values[i] - avg; - if (diff > minimumDiff) { - deviatedElements.add(i); - } - } - - int[] result = new int[deviatedElements.size()]; - for (int i = 0; i < result.length; i++) { - result[i] = deviatedElements.get(i); - } - - return result; - } - - public static long[][] findTwoGroups(long[] values) { - return findTwoGroupsRecursive(values, average(values), 2); - } - - public static long[][] findTwoGroupsRecursive(long[] values, long middle, int levels) { - if (levels > 0) { - long[][] result = two_means(values, middle); - long newMiddle = average(result[1]) - average(result[0]); - return findTwoGroupsRecursive(values, newMiddle, levels - 1); - } - return two_means(values, middle); - } - - private static long[][] two_means(long[] values, long middle) { - List smaller = new ArrayList(); - List larger = new ArrayList(); - for (int i = 0; i < values.length; i++) { - if (values[i] < middle) { - smaller.add(values[i]); - } else { - larger.add(values[i]); - } - } - - long[][] result = new long[2][]; - result[0] = toArray(smaller); - result[1] = toArray(larger); - - return result; - } - - private static long[] toArray(List input) { - long[] result = new long[input.size()]; - for (int i = 0; i < result.length; i++) { - result[i] = input.get(i); - } - return result; - } - - public static long average(long[] values) { - //Find average - double sum = 0d; - for (long value : values) { - sum += value; - } - return (long) (sum / (double) values.length); - } - - public static long average(List values) { - //Find average - double sum = 0d; - for (long value : values) { - sum += value; + + public static long[][] findTwoGroups(long[] values) { + return findTwoGroupsRecursive(values, average(values), 2); + } + + public static long[][] findTwoGroupsRecursive(long[] values, long middle, int levels) { + if (levels > 0) { + long[][] result = twoMeans(values, middle); + long newMiddle = average(result[1]) - average(result[0]); + return findTwoGroupsRecursive(values, newMiddle, levels - 1); + } + return twoMeans(values, middle); + } + + private static long[][] twoMeans(long[] values, long middle) { + List smaller = new ArrayList(); + List larger = new ArrayList(); + for (int i = 0; i < values.length; i++) { + if (values[i] < middle) { + smaller.add(values[i]); + } else { + larger.add(values[i]); } - return (long) (sum / (double) values.size()); } - public static String describeFactor(long value, long compare, String suffix) { - double factor = (double) value / (double) compare; - if (Double.isNaN(factor)) { - return ""; - } - return "(" + String.format("%.2f", factor) + suffix + ")"; + long[][] result = new long[2][]; + result[0] = toArray(smaller); + result[1] = toArray(larger); + + return result; + } + + private static long[] toArray(List input) { + long[] result = new long[input.size()]; + 
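// Unbox each element into the primitive array; List.toArray() would yield Long[], not long[].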
for (int i = 0; i < result.length; i++) { + result[i] = input.get(i); } + return result; + } - public static String readableTimespan(long milliseconds) { - long seconds = milliseconds / 1000; - long minutes = seconds / 60; - long hours = minutes / 60; - minutes %= 60; - seconds %= 60; - StringBuilder sb = new StringBuilder(); - if (hours > 0) { - sb.append(hours).append("hr "); - } - if (minutes > 0) { - sb.append(minutes).append("min "); - } - if (seconds > 0) { - sb.append(seconds).append("sec "); - } - return sb.toString().trim(); + /** + * Compute average for the given array of long + * + * @param values the values + * @return The average(values) + */ + public static long average(long[] values) { + //Find average + double sum = 0d; + for (long value : values) { + sum += value; } + return (long) (sum / (double) values.length); + } - public static Severity getNumTasksSeverity(long numTasks) { - return Severity.getSeverityAscending(numTasks, - 10, 50, 100, 200); + /** + * Compute average for a List of long values + * + * @param values the values + * @return The average(values) + */ + public static long average(List values) { + //Find average + double sum = 0d; + for (long value : values) { + sum += value; } + return (long) (sum / (double) values.size()); + } - public static T[] createSample(Class clazz, T[] objects, int size) { - //Skip this process if number of items already smaller than sample size - if (objects.length <= size) { - return objects; - } + /** + * Find the median of the given list + * + * @param values The values + * @return The median(values) + */ + public static long median(List values) { + if (values.size() == 0) { + throw new IllegalArgumentException("Median of an empty list is not defined."); + } + Collections.sort(values); + int middle = values.size() / 2; + if (values.size() % 2 == 0) { + return (values.get(middle - 1) + values.get(middle)) / 2; + } else { + return values.get(middle); + } + } - @SuppressWarnings("unchecked") - T[] result = (T[]) Array.newInstance(clazz, size); + /** + * Compute ratio and display it with a suffix. 
+ * + * Example: Average sort time (0.14x) + * + * @param value The value to be compared + * @param compare The value compared against + * @param suffix The suffix string + * @return The ratio followed by suffix + */ + public static String describeFactor(long value, long compare, String suffix) { + double factor = (double) value / (double) compare; + if (Double.isNaN(factor)) { + return ""; + } + return "(" + String.format("%.2f", factor) + suffix + ")"; + } - //Shuffle a clone copy - T[] clone = objects.clone(); - Collections.shuffle(Arrays.asList(clone)); + /** + * Convert milliseconds to readable value + * + * @param milliseconds The number of milliseconds + * @return A String of readable time + */ + public static String readableTimespan(long milliseconds) { + if (milliseconds == 0) { + return "0 sec"; + } - //Take the first n items - System.arraycopy(clone, 0, result, 0, size); + long seconds = milliseconds / 1000; + long minutes = seconds / 60; + long hours = minutes / 60; + minutes %= 60; + seconds %= 60; + StringBuilder sb = new StringBuilder(); + if (hours > 0) { + sb.append(hours).append(" hr "); + } + if (minutes > 0) { + sb.append(minutes).append(" min "); + } + if (seconds > 0) { + sb.append(seconds).append(" sec "); + } + return sb.toString().trim(); + } - return result; + public static T[] createSample(Class clazz, T[] objects, int size) { + //Skip this process if number of items already smaller than sample size + if (objects.length <= size) { + return objects; } - // Create a random sample within the original array - public static void shuffleArraySample(T[] array, int sampleSize) { - if(array.length <= sampleSize) { - return; - } + @SuppressWarnings("unchecked") + T[] result = (T[]) Array.newInstance(clazz, size); + + //Shuffle a clone copy + T[] clone = objects.clone(); + Collections.shuffle(Arrays.asList(clone)); + + //Take the first n items + System.arraycopy(clone, 0, result, 0, size); + + return result; + } + + /** + * Create a random sample within the original array + */ + public static void shuffleArraySample(T[] array, int sampleSize) { + if (array.length <= sampleSize) { + return; + } - T temp; - int index; - Random random = new Random(); + T temp; + int index; + Random random = new Random(); - for (int i = 0; i com.codahale.metrics.jvm.GarbageCollectorMetricSet. + * + *

+ * The following custom gauges are added.
+ *
+ *     jvmUptime - The time since the JVM was started.
+ *
+ *     gc2UptimeRatio - The ratio of GC collection times to JVM uptime. Collection
+ *     times for both young gen and perm gen are counted.
+ */ +public class CustomGarbageCollectorMetricSet implements MetricSet { + private static final Pattern WHITESPACE = Pattern.compile("[\\s]+"); + + private final List garbageCollectors; + + /** + * Creates a new set of gauges for all discoverable garbage collectors. + */ + public CustomGarbageCollectorMetricSet() { + this(ManagementFactory.getGarbageCollectorMXBeans()); + } + + /** + * Creates a new set of gauges for the given collection of garbage collectors. + * + * @param garbageCollectors the garbage collectors + */ + public CustomGarbageCollectorMetricSet(Collection garbageCollectors) { + this.garbageCollectors = new ArrayList(garbageCollectors); + } + + /** + * @return Returns a map of defined gauges. + */ + @Override + public Map getMetrics() { + final Map gauges = new HashMap(); + + long cumulativeGCTime = 0L; + + for (final GarbageCollectorMXBean gc : garbageCollectors) { + final String name = WHITESPACE.matcher(gc.getName()).replaceAll("-"); + + gauges.put(name(name, "count"), new Gauge() { + @Override + public Long getValue() { + return gc.getCollectionCount(); + } + }); + + gauges.put(name(name, "time"), new Gauge() { + @Override + public Long getValue() { + return gc.getCollectionTime(); + } + }); + + cumulativeGCTime += gc.getCollectionTime(); + } + + final long uptime = ManagementFactory.getRuntimeMXBean().getUptime(); + final Double gc2UptimeRatio = (double)cumulativeGCTime / uptime; + + gauges.put("jvmUptime", new Gauge() { + @Override + public Long getValue() { + return uptime; + } + }); + + gauges.put("gc2UptimeRatio", new Gauge() { + @Override + public Double getValue() { + return gc2UptimeRatio; + } + }); + + return Collections.unmodifiableMap(gauges); + } +} diff --git a/app/com/linkedin/drelephant/notifications/EmailThread.java b/app/com/linkedin/drelephant/notifications/EmailThread.java deleted file mode 100644 index 8eacbbe89..000000000 --- a/app/com/linkedin/drelephant/notifications/EmailThread.java +++ /dev/null @@ -1,103 +0,0 @@ -package com.linkedin.drelephant.notifications; - -import com.linkedin.drelephant.analysis.Severity; -import model.JobResult; -import org.apache.commons.mail.DefaultAuthenticator; -import org.apache.commons.mail.EmailException; -import org.apache.commons.mail.HtmlEmail; -import play.Play; -import views.html.emailcritical; - -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.atomic.AtomicBoolean; - -public class EmailThread extends Thread { - - private LinkedBlockingQueue resultQueue; - private AtomicBoolean running = new AtomicBoolean(true); - - private String smtpHost; - private int smtpPort; - private String smtpFrom; - private DefaultAuthenticator authenticator = null; - - public EmailThread() { - setName("Email Thread"); - resultQueue = new LinkedBlockingQueue(); - smtpHost = Play.application().configuration().getString("smtp.host"); - smtpPort = Play.application().configuration().getInt("smtp.port"); - String smtpUser = Play.application().configuration().getString("smtp.user"); - String smtpPassword = Play.application().configuration().getString("smtp.password"); - if (smtpUser != null && !smtpUser.isEmpty()) { - authenticator = new DefaultAuthenticator(smtpUser, smtpPassword); - } - smtpFrom = Play.application().configuration().getString("smtp.from"); - } - - @Override - public void run() { - while (running.get()) { - JobResult result = null; - while (result == null && running.get()) { - try { - result = resultQueue.take(); - } catch (InterruptedException e) { - //Ignored - } - } - if (!running.get()) { 
- return; - } - Severity worstSeverity = result.severity; - if (worstSeverity == Severity.CRITICAL) { - //Send email right away - sendCriticalEmail(result); - } else if (worstSeverity == Severity.SEVERE) { - //Keep track of a digest and send in intervals - } - } - } - - public void kill() { - running.set(false); - this.interrupt(); - } - - public void enqueue(JobResult result) { - try { - resultQueue.put(result); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - private void sendCriticalEmail(JobResult result) { - try { - //Generate content - String html = emailcritical.render(result).body(); - - //Send email - HtmlEmail email = new HtmlEmail(); - email.setHostName(smtpHost); - email.setSmtpPort(smtpPort); - if (authenticator != null) { - email.setAuthenticator(authenticator); - } - email.setSSLOnConnect(true); - email.setFrom(smtpFrom); - email.addTo(result.username + "@linkedin.com"); - email.setSubject("Dr. Elephant - Hadoop Job Status Notification"); - email.setHtmlMsg(html); - email.setDebug(true); - /////////////////// - // - // WARNING: This next line will send out the emails. - // Do NOT uncomment before proper testing and mental meditation. - // - /////////////////// - //email.send(); - } catch (EmailException e) { - e.printStackTrace(); - } - } -} diff --git a/app/com/linkedin/drelephant/schedulers/AirflowScheduler.java b/app/com/linkedin/drelephant/schedulers/AirflowScheduler.java new file mode 100644 index 000000000..075c01ef6 --- /dev/null +++ b/app/com/linkedin/drelephant/schedulers/AirflowScheduler.java @@ -0,0 +1,137 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.schedulers; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; +import com.linkedin.drelephant.util.Utils; + +import java.util.Properties; +import org.apache.log4j.Logger; + + +/** + * This class provides methods to load information specific to the Airflow scheduler. 
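+ * The scheduler details are read from the airflow.ctx.* properties passed in via the job
+ * configuration (see loadInfo() below).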
+ */ +public class AirflowScheduler implements Scheduler { + + private static final Logger logger = Logger.getLogger(AirflowScheduler.class); + + public static final String AIRFLOW_TASK_ID = "airflow.ctx.task.task_id"; + public static final String AIRFLOW_TASK_INSTANCE_EXECUTION_DATE = "airflow.ctx.task_instance.execution_date"; + public static final String AIRFLOW_DAG_ID = "airflow.ctx.dag.dag_id"; + public static final String AIRFLOW_DAG_RUN_EXECUTION_DATE = "airflow.ctx.dag_run.execution_date"; + + public static final String AIRFLOW_BASE_URL_PARAM_NAME = "airflowbaseurl"; + private static final String AIRFLOW_BASE_URL_DEFAULT = "http://localhost:8000"; + + private String _schedulerName; + private String _taskId; + private String _taskInstanceExecutionDate; + private String _dagId; + private String _dagRunExecutionDate; + private int _subdagDepth; + private String _baseUrl; + + + public AirflowScheduler(String appId, Properties properties, SchedulerConfigurationData schedulerConfData) { + _schedulerName = schedulerConfData.getSchedulerName(); + _baseUrl = schedulerConfData.getParamMap().get(AIRFLOW_BASE_URL_PARAM_NAME); + if (_baseUrl == null) { + _baseUrl = AIRFLOW_BASE_URL_DEFAULT; + } + + if (properties != null) { + loadInfo(appId, properties); + } else { + // Use default value of data type + } + } + + private void loadInfo(String appId, Properties properties) { + // examples: + // my_amazing_task_id + _taskId = properties.getProperty(AIRFLOW_TASK_ID); + // 2016-06-27T01:30:00 + _taskInstanceExecutionDate = properties.getProperty(AIRFLOW_TASK_INSTANCE_EXECUTION_DATE); + // my_amazing_dag_id + _dagId = properties.getProperty(AIRFLOW_DAG_ID); // + // 2016-06-27T00:00:00 + _dagRunExecutionDate = properties.getProperty(AIRFLOW_DAG_RUN_EXECUTION_DATE); + + _subdagDepth = 0; // TODO: Add sub-dag support + } + + @Override + public String getSchedulerName() { + return _schedulerName; + } + + @Override + public boolean isEmpty() { + return _taskId == null || _taskInstanceExecutionDate == null || _dagId == null || _dagRunExecutionDate == null; + } + + @Override + public String getJobDefId() { + return Utils.formatStringOrNull("%s/%s", _dagId, _taskId); + } + + @Override + public String getJobExecId() { + return Utils.formatStringOrNull("%s/%s/%s/%s", _dagId, _dagRunExecutionDate, _taskId, _taskInstanceExecutionDate); + } + + @Override + public String getFlowDefId() { + return Utils.formatStringOrNull("%s", _dagId); + } + + @Override + public String getFlowExecId() { + return Utils.formatStringOrNull("%s/%s", _dagId, _dagRunExecutionDate); + } + + @Override + public String getJobDefUrl() { + return Utils.formatStringOrNull("%s/admin/airflow/code?dag_id=%s&task_id=%s", _baseUrl, _dagId, _taskId); + } + + @Override + public String getJobExecUrl() { + return Utils.formatStringOrNull("%s/admin/airflow/log?dag_id=%s&task_id=%s&execution_date=%s", + _baseUrl, _dagId, _taskId, _taskInstanceExecutionDate); + + } + + @Override + public String getFlowDefUrl() { + return Utils.formatStringOrNull("%s/admin/airflow/graph?dag_id=%s", _baseUrl, _dagId); + } + + @Override + public String getFlowExecUrl() { + return Utils.formatStringOrNull("%s/admin/airflow/graph?dag_id=%s&execution_date=%s", _baseUrl, _dagId, _dagRunExecutionDate); + } + + @Override + public int getWorkflowDepth() { + return _subdagDepth; + } + + @Override + public String getJobName() { return _taskId; } +} diff --git a/app/com/linkedin/drelephant/schedulers/AzkabanScheduler.java b/app/com/linkedin/drelephant/schedulers/AzkabanScheduler.java 
new file mode 100644 index 000000000..ad18c71e3 --- /dev/null +++ b/app/com/linkedin/drelephant/schedulers/AzkabanScheduler.java @@ -0,0 +1,137 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.schedulers; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; +import java.util.Properties; +import org.apache.log4j.Logger; + + +/** + * This class provides methods to load information specific to the Azkaban scheduler. + */ +public class AzkabanScheduler implements Scheduler { + + private static final Logger logger = Logger.getLogger(AzkabanScheduler.class); + + public static final String AZKABAN_WORKFLOW_URL = "azkaban.link.workflow.url"; + public static final String AZKABAN_JOB_URL = "azkaban.link.job.url"; + public static final String AZKABAN_EXECUTION_URL = "azkaban.link.execution.url"; + public static final String AZKABAN_ATTEMPT_URL = "azkaban.link.attempt.url"; + public static final String AZKABAN_JOB_NAME = "azkaban.job.id"; + + private String schedulerName; + private String jobDefId; + private String jobExecId; + private String flowDefId; + private String flowExecId; + + private String jobDefUrl; + private String jobExecUrl; + private String flowDefUrl; + private String flowExecUrl; + + private String jobName; + private int workflowDepth; + + + public AzkabanScheduler(String appId, Properties properties, SchedulerConfigurationData schedulerConfData) { + schedulerName = schedulerConfData.getSchedulerName(); + if (properties != null) { + loadInfo(appId, properties); + } else { + // Use default value of data type + } + } + + private void loadInfo(String appId, Properties properties) { + // Update the 4 Ids + jobDefId = properties.getProperty(AZKABAN_JOB_URL); + jobExecId = properties.getProperty(AZKABAN_ATTEMPT_URL); + flowDefId = properties.getProperty(AZKABAN_WORKFLOW_URL); + flowExecId = properties.getProperty(AZKABAN_EXECUTION_URL); + + // For Azkaban, The url and ids are the same + jobExecUrl = jobExecId; + jobDefUrl = jobDefId; + flowExecUrl = flowExecId; + flowDefUrl = flowDefId; + + workflowDepth = 0; // TODO: Add sub-workflow support + jobName = properties.getProperty(AZKABAN_JOB_NAME); + } + + @Override + public String getSchedulerName() { + return schedulerName; + } + + @Override + public boolean isEmpty() { + return jobDefId == null || jobExecId == null || flowDefId == null || flowExecId == null; + } + + @Override + public String getJobDefId() { + return jobDefId; + } + + @Override + public String getJobExecId() { + return jobExecId; + } + + @Override + public String getFlowDefId() { + return flowDefId; + } + + @Override + public String getFlowExecId() { + return flowExecId; + } + + @Override + public String getJobDefUrl() { + return jobDefUrl; + } + + @Override + public String getJobExecUrl() { + return jobExecUrl; + } + + @Override + public String getFlowDefUrl() { + return flowDefUrl; + } + + @Override + public String getFlowExecUrl() { + return 
flowExecUrl;
+ }
+
+ @Override
+ public int getWorkflowDepth() {
+ return workflowDepth;
+ }
+
+ @Override
+ public String getJobName() {
+ return jobName;
+ }
+}
\ No newline at end of file
diff --git a/app/com/linkedin/drelephant/schedulers/NoScheduler.java b/app/com/linkedin/drelephant/schedulers/NoScheduler.java
new file mode 100644
index 000000000..b3130c7df
--- /dev/null
+++ b/app/com/linkedin/drelephant/schedulers/NoScheduler.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.schedulers;
+
+import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData;
+import java.util.Properties;
+import org.apache.log4j.Logger;
+
+
+/**
+ * This class provides methods to load workflow information when no scheduler is present.
+ * It uses the paradigm that one job is one flow.
+ */
+public class NoScheduler implements Scheduler {
+
+ private static final Logger logger = Logger.getLogger(NoScheduler.class);
+
+ private String schedulerName;
+ private String jobDefId;
+ private String jobExecId;
+ private String flowDefId;
+ private String flowExecId;
+
+ private String jobDefUrl;
+ private String jobExecUrl;
+ private String flowDefUrl;
+ private String flowExecUrl;
+
+ private String jobName;
+ private int workflowDepth;
+
+
+ public NoScheduler(String appId, Properties properties, SchedulerConfigurationData schedulerConfData) {
+ schedulerName = schedulerConfData.getSchedulerName();
+ if (properties != null) {
+ loadInfo(appId, properties);
+ } else {
+ // Use default value of data type
+ }
+ }
+
+ private void loadInfo(String appId, Properties properties) {
+ workflowDepth = 0; // No sub-workflow support
+
+ jobName = properties.getProperty("mapreduce.job.name");
+ String workflowName = properties.getProperty("mapreduce.workflow.name");
+ if (workflowName == null) {
+ return;
+ }
+ jobDefId = properties.getProperty("mapreduce.job.user.name") + ":" + workflowName;
+ jobExecId = properties.getProperty("mapreduce.workflow.id");
+ flowDefId = jobDefId;
+ flowExecId = jobExecId;
+ }
+
+ @Override
+ public String getSchedulerName() {
+ return schedulerName;
+ }
+
+ @Override
+ public boolean isEmpty() {
+ return jobDefId == null || jobExecId == null || flowDefId == null || flowExecId == null;
+ }
+
+ @Override
+ public String getJobDefId() {
+ return jobDefId;
+ }
+
+ @Override
+ public String getJobExecId() {
+ return jobExecId;
+ }
+
+ @Override
+ public String getFlowDefId() {
+ return flowDefId;
+ }
+
+ @Override
+ public String getFlowExecId() {
+ return flowExecId;
+ }
+
+ @Override
+ public String getJobDefUrl() {
+ return jobDefUrl;
+ }
+
+ @Override
+ public String getJobExecUrl() {
+ return jobExecUrl;
+ }
+
+ @Override
+ public String getFlowDefUrl() {
+ return flowDefUrl;
+ }
+
+ @Override
+ public String getFlowExecUrl() {
+ return flowExecUrl;
+ }
+
+ @Override
+ public int getWorkflowDepth() {
+ return workflowDepth;
+ }
+
+ @Override
+ public String
getJobName() { + return jobName; + } +} diff --git a/app/com/linkedin/drelephant/schedulers/OozieScheduler.java b/app/com/linkedin/drelephant/schedulers/OozieScheduler.java new file mode 100644 index 000000000..6e78a0932 --- /dev/null +++ b/app/com/linkedin/drelephant/schedulers/OozieScheduler.java @@ -0,0 +1,264 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.schedulers; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; +import com.linkedin.drelephant.util.Utils; +import org.apache.log4j.Logger; +import org.apache.oozie.client.*; +import org.apache.commons.lang.StringUtils; +import java.util.Properties; + + +/** + * This class provides methods to load information specific to the Oozie scheduler. + */ +public class OozieScheduler implements Scheduler { + + private static final Logger logger = Logger.getLogger(OozieScheduler.class); + + private static final String OOZIE_ACTION_ID = "oozie.action.id"; + + private static final String OOZIE_API_URL = "oozie_api_url"; + private static final String OOZIE_AUTH_OPTION = "oozie_auth_option"; + private static final String OOZIE_JOB_DEF_URL_TEMPLATE = "oozie_job_url_template"; + private static final String OOZIE_JOB_EXEC_URL_TEMPLATE = "oozie_job_exec_url_template"; + private static final String OOZIE_WORKFLOW_DEF_URL_TEMPLATE = "oozie_workflow_url_template"; + private static final String OOZIE_WORKFLOW_EXEC_URL_TEMPLATE = "oozie_workflow_exec_url_template"; + private static final String OOZIE_APP_NAME_UNIQUENESS = "oozie_app_name_uniqueness"; + private boolean appNameUniqueness; + + private String schedulerName; + private String jobDefId; + private String jobExecId; + private String flowExecId; + private String flowDefId; + private String jobDefIdUrl; + private String jobExecIdUrl; + private String flowExecIdUrl; + private String flowDefIdUrl; + private int workflowDepth; + + private OozieClient oozieClient; + private String jobDefUrlTemplate; + private String jobExecUrlTemplate; + private String workflowDefUrlTemplate; + private String workflowExecUrlTemplate; + private String flowDefName; + + public OozieScheduler(String appId, Properties properties, SchedulerConfigurationData schedulerConfData) { + this(appId, properties, schedulerConfData, null); + } + + public OozieScheduler(String appId, Properties properties, SchedulerConfigurationData schedulerConfData, OozieClient oozieClient) { + schedulerName = schedulerConfData.getSchedulerName(); + + if (properties != null && properties.getProperty(OOZIE_ACTION_ID) != null) { + this.oozieClient = oozieClient == null ? 
makeOozieClient(schedulerConfData) : oozieClient; + jobDefUrlTemplate = schedulerConfData.getParamMap().get(OOZIE_JOB_DEF_URL_TEMPLATE); + jobExecUrlTemplate = schedulerConfData.getParamMap().get(OOZIE_JOB_EXEC_URL_TEMPLATE); + workflowDefUrlTemplate = schedulerConfData.getParamMap().get(OOZIE_WORKFLOW_DEF_URL_TEMPLATE); + workflowExecUrlTemplate = schedulerConfData.getParamMap().get(OOZIE_WORKFLOW_EXEC_URL_TEMPLATE); + String appNameUniquenessStr = schedulerConfData.getParamMap().get(OOZIE_APP_NAME_UNIQUENESS); + appNameUniqueness = appNameUniquenessStr != null && Boolean.parseBoolean(appNameUniquenessStr); + + loadInfo(properties); + } + + // Use default value of data type + } + + private void loadInfo(Properties properties) { + // 0004167-160629080632562-oozie-oozi-W@some-action + String actionId = properties.getProperty(OOZIE_ACTION_ID); + + if (actionId.contains("@")) { + String workflowId = extractId(actionId); + + WorkflowJob workflow; + try { + logger.info("Fetching Oozie workflow info for " + workflowId); + + workflow = oozieClient.getJobInfo(workflowId); + logger.info("Oozie workflow for " + workflowId + ": " + workflow); + + String superParentId = getSuperParentId(workflow); + logger.info("Oozie super parent for: " + workflowId + ": " + superParentId); + + jobExecId = workflow.getId(); + jobExecIdUrl = workflow.getConsoleUrl(); + jobDefIdUrl = workflow.getConsoleUrl(); + flowExecId = superParentId; + + if (isCoordinatorJob(superParentId)) { + coordinatedJobInfo(workflow, actionId, superParentId); + } else { + manualCommittedJob(workflow, actionId, superParentId); + } + } catch (OozieClientException e) { + throw new RuntimeException("Failed fetching Oozie workflow " + workflowId + " info", e); + } + } + } + + private void manualCommittedJob(WorkflowJob workflow, String actionId, String superParentId) throws OozieClientException { + logger.info("Oozie workflow " + actionId + " was manually submitted"); + WorkflowJob flowDefWorkflow = oozieClient.getJobInfo(extractId(superParentId)); + flowDefIdUrl = flowDefWorkflow.getConsoleUrl(); + flowExecIdUrl = flowDefWorkflow.getConsoleUrl(); + if (appNameUniqueness) { + jobDefId = workflow.getAppName() + "-" + extractAction(actionId); + flowDefId = superParentId; + flowDefName = flowDefWorkflow.getAppName(); + } else { + jobDefId = workflow.getId(); + flowDefId = superParentId; + } + } + + private void coordinatedJobInfo(WorkflowJob workflow, String actionId, String superParentId) throws OozieClientException { + logger.info("Oozie workflow " + actionId + " is scheduled with coordinator"); + CoordinatorJob flowDefCoordinator = oozieClient.getCoordJobInfo(extractId(superParentId)); + flowDefIdUrl = flowDefCoordinator.getConsoleUrl(); + flowExecIdUrl = flowDefCoordinator.getConsoleUrl(); + if (appNameUniqueness) { + jobDefId = workflow.getAppName() + "-" + extractAction(actionId); + flowDefId = extractId(superParentId); + flowDefName = flowDefCoordinator.getAppName(); + } else { + jobDefId = extractId(superParentId) + "-" + extractAction(actionId) + "-" + workflowDepth; + flowDefId = extractId(superParentId); + } + } + + private String extractId(String idAndAction) { + return idAndAction.split("@")[0]; + } + + private String extractAction(String idAndAction) { + return idAndAction.split("@")[1]; + } + + private String getSuperParentId(WorkflowJob workflow) throws OozieClientException { + + WorkflowJob current = workflow; + workflowDepth = 0; + + while (hasParent(current)) { + if (isCoordinatorJob(current.getParentId())) { + return 
current.getParentId(); + } + current = oozieClient.getJobInfo(current.getParentId()); + + workflowDepth++; + } + + return current.getId(); + } + + private boolean hasParent(WorkflowJob workflow) { + return StringUtils.isNotEmpty(workflow.getParentId()); + } + + private boolean isCoordinatorJob(String workflowId) { + return workflowId != null && extractId(workflowId).endsWith("C"); + } + + private OozieClient makeOozieClient(SchedulerConfigurationData schedulerConfData) { + String oozieApiUrl = schedulerConfData.getParamMap().get(OOZIE_API_URL); + String authOption = schedulerConfData.getParamMap().get(OOZIE_AUTH_OPTION); + if (oozieApiUrl == null) { + throw new RuntimeException("Missing " + OOZIE_API_URL + " param for Oozie Scheduler"); + } + + return new AuthOozieClient(oozieApiUrl, authOption); + } + + private String getUrl(String idUrl, String id, String urlTemplate, String propertyName) { + String url; + if (urlTemplate != null) { + url = Utils.formatStringOrNull(urlTemplate, id); + } else if (idUrl != null) { + url = idUrl; + } else { + logger.warn("Missing " + propertyName + " param for Oozie Scheduler"); + url = id; + } + + return url; + } + + @Override + public String getSchedulerName() { + return schedulerName; + } + + @Override + public boolean isEmpty() { + return schedulerName == null || jobDefId == null || jobExecId == null || flowDefId == null || flowExecId == null; + } + + @Override + public String getJobDefId() { + return Utils.formatStringOrNull("%s", jobDefId); + } + + @Override + public String getJobExecId() { + return Utils.formatStringOrNull("%s", jobExecId); + } + + @Override + public String getFlowDefId() { + return Utils.formatStringOrNull("%s", appNameUniqueness ? flowDefName : flowDefId); + } + + @Override + public String getFlowExecId() { + return Utils.formatStringOrNull("%s", flowExecId); + } + + @Override + public String getJobDefUrl() { + return getUrl(jobDefIdUrl, jobDefId, jobDefUrlTemplate, OOZIE_JOB_DEF_URL_TEMPLATE); + } + + @Override + public String getJobExecUrl() { + return getUrl(jobExecIdUrl, jobExecId, jobExecUrlTemplate, OOZIE_JOB_EXEC_URL_TEMPLATE); + } + + @Override + public String getFlowDefUrl() { + return getUrl(flowDefIdUrl, flowDefId, workflowDefUrlTemplate, OOZIE_WORKFLOW_DEF_URL_TEMPLATE); + } + + @Override + public String getFlowExecUrl() { + return getUrl(flowExecIdUrl, flowExecId, workflowExecUrlTemplate, OOZIE_WORKFLOW_EXEC_URL_TEMPLATE); + } + + @Override + public int getWorkflowDepth() { + return workflowDepth; + } + + @Override + public String getJobName() { + return jobDefId; + } +} diff --git a/app/com/linkedin/drelephant/schedulers/Scheduler.java b/app/com/linkedin/drelephant/schedulers/Scheduler.java new file mode 100644 index 000000000..241d3b276 --- /dev/null +++ b/app/com/linkedin/drelephant/schedulers/Scheduler.java @@ -0,0 +1,108 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+package com.linkedin.drelephant.schedulers;
+
+
+/**
+ * Scheduler interface defining the identifiers, URLs, and metadata that each
+ * supported scheduler must provide for its jobs and flows
+ */
+public interface Scheduler {
+
+ /**
+ * Return the Scheduler Name
+ *
+ * @return the scheduler name
+ */
+ public String getSchedulerName();
+
+ /**
+ * True if the scheduler object was not able to parse the given properties
+ *
+ * @return true if the scheduler is empty
+ */
+ public boolean isEmpty();
+
+ /**
+ * Return the Job Definition Id of the job in the workflow
+ *
+ * @return the job definition id
+ */
+ public String getJobDefId();
+
+ /**
+ * Return the Job Execution Id of the job in the workflow
+ *
+ * @return the job execution id
+ */
+ public String getJobExecId();
+
+ /**
+ * Return the Flow Definition Id of the workflow
+ *
+ * @return the flow definition id
+ */
+ public String getFlowDefId();
+
+ /**
+ * Return the Flow Execution Id of the workflow
+ *
+ * @return the flow execution id
+ */
+ public String getFlowExecId();
+
+ /**
+ * Return a link to the job's definition
+ *
+ * @return the job definition url
+ */
+ public String getJobDefUrl();
+
+ /**
+ * Return a link to the job's execution
+ *
+ * @return the job execution url
+ */
+ public String getJobExecUrl();
+
+ /**
+ * Return a link to the flow's definition
+ *
+ * @return the flow definition url
+ */
+ public String getFlowDefUrl();
+
+ /**
+ * Return a link to the flow's execution
+ *
+ * @return the flow execution url
+ */
+ public String getFlowExecUrl();
+
+ /**
+ * Return the name of the Job/Action in the Flow
+ *
+ * @return the job/action name
+ */
+ public String getJobName();
+
+ /**
+ * Return the workflow depth
+ *
+ * @return the workflow depth
+ */
+ public int getWorkflowDepth();
+}
diff --git a/app/com/linkedin/drelephant/security/HadoopSecurity.java b/app/com/linkedin/drelephant/security/HadoopSecurity.java
new file mode 100644
index 000000000..9c63b2965
--- /dev/null
+++ b/app/com/linkedin/drelephant/security/HadoopSecurity.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.security;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.PrivilegedAction;
+
+
+/**
+ * This class handles authentication when the cluster is security-enabled.
+ */
+public class HadoopSecurity {
+ private static final Logger logger = Logger.getLogger(HadoopSecurity.class);
+
+ private UserGroupInformation _loginUser = null;
+
+ private String _keytabLocation;
+ private String _keytabUser;
+ private boolean _securityEnabled = false;
+
+
+ private static HadoopSecurity instance = null;
+
+ public static HadoopSecurity getInstance() throws IOException {
+ if (instance == null) {
+ instance = new HadoopSecurity();
+ }
+ return instance;
+ }
+
+ public HadoopSecurity() throws IOException {
+ Configuration conf = new Configuration();
+ UserGroupInformation.setConfiguration(conf);
+ _securityEnabled = UserGroupInformation.isSecurityEnabled();
+ if (_securityEnabled) {
+ logger.info("This cluster is Kerberos enabled.");
+ boolean login = true;
+
+ _keytabUser = System.getProperty("keytab.user");
+ if (_keytabUser == null) {
+ logger.error("Keytab user not set. Please set keytab_user in the configuration file");
+ login = false;
+ }
+
+ _keytabLocation = System.getProperty("keytab.location");
+ if (_keytabLocation == null) {
+ logger.error("Keytab location not set. Please set keytab_location in the configuration file");
+ login = false;
+ } else if (!new File(_keytabLocation).exists()) {
+ logger.error("The keytab file at location [" + _keytabLocation + "] does not exist.");
+ login = false;
+ }
+
+ if (!login) {
+ throw new IOException("Cannot login. This cluster is security enabled.");
+ }
+
+ checkLogin();
+ }
+ }
+
+ public UserGroupInformation getUGI() throws IOException {
+ checkLogin();
+ return _loginUser;
+ }
+
+ public void checkLogin() throws IOException {
+
+ if (_loginUser == null) {
+ logger.info("No login user. Creating login user");
+ logger.info("Logging in with " + _keytabUser + " and " + _keytabLocation);
+ UserGroupInformation.loginUserFromKeytab(_keytabUser, _keytabLocation);
+ _loginUser = UserGroupInformation.getLoginUser();
+ logger.info("Logged in with user " + _loginUser);
+ if (UserGroupInformation.isLoginKeytabBased()) {
+ logger.info("Login is keytab based");
+ } else {
+ logger.info("Login is not keytab based");
+ }
+ } else {
+ _loginUser.checkTGTAndReloginFromKeytab();
+ }
+
+ }
+
+ public <T> T doAs(PrivilegedAction<T> action) throws IOException {
+ UserGroupInformation ugi = getUGI();
+ if (ugi != null) {
+ return ugi.doAs(action);
+ }
+ return null;
+ }
+}
diff --git a/app/com/linkedin/drelephant/spark/SparkMetricsAggregator.scala b/app/com/linkedin/drelephant/spark/SparkMetricsAggregator.scala
new file mode 100644
index 000000000..9dc1ac5ae
--- /dev/null
+++ b/app/com/linkedin/drelephant/spark/SparkMetricsAggregator.scala
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.spark
+
+import com.linkedin.drelephant.analysis.{HadoopAggregatedData, HadoopApplicationData, HadoopMetricsAggregator}
+import com.linkedin.drelephant.configurations.aggregator.AggregatorConfigurationData
+import com.linkedin.drelephant.math.Statistics
+import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData}
+import com.linkedin.drelephant.util.MemoryFormatUtils
+import org.apache.commons.io.FileUtils
+import org.apache.log4j.Logger
+import scala.util.Try
+
+
+class SparkMetricsAggregator(private val aggregatorConfigurationData: AggregatorConfigurationData)
+ extends HadoopMetricsAggregator {
+ import SparkMetricsAggregator._
+
+ private val logger: Logger = Logger.getLogger(classOf[SparkMetricsAggregator])
+
+ private val allocatedMemoryWasteBufferPercentage: Double =
+ Option(aggregatorConfigurationData.getParamMap.get(ALLOCATED_MEMORY_WASTE_BUFFER_PERCENTAGE_KEY))
+ .flatMap { value => Try(value.toDouble).toOption }
+ .getOrElse(DEFAULT_ALLOCATED_MEMORY_WASTE_BUFFER_PERCENTAGE)
+
+ private val hadoopAggregatedData: HadoopAggregatedData = new HadoopAggregatedData()
+
+ override def getResult(): HadoopAggregatedData = hadoopAggregatedData
+
+ override def aggregate(data: HadoopApplicationData): Unit = data match {
+ case (data: SparkApplicationData) => aggregate(data)
+ case _ => throw new IllegalArgumentException("data should be SparkApplicationData")
+ }
+
+ private def aggregate(data: SparkApplicationData): Unit = for {
+ executorInstances <- executorInstancesOf(data)
+ executorMemoryBytes <- executorMemoryBytesOf(data)
+ } {
+ val applicationDurationMillis = applicationDurationMillisOf(data)
+ val totalExecutorTaskTimeMillis = totalExecutorTaskTimeMillisOf(data)
+
+ val resourcesAllocatedForUse =
+ aggregateResourcesAllocatedForUse(executorInstances, executorMemoryBytes, applicationDurationMillis)
+ val resourcesActuallyUsed = aggregateResourcesActuallyUsed(executorMemoryBytes, totalExecutorTaskTimeMillis)
+
+ val resourcesActuallyUsedWithBuffer = resourcesActuallyUsed.doubleValue() * (1.0 + allocatedMemoryWasteBufferPercentage)
+ val resourcesWastedMBSeconds =
+ if (resourcesActuallyUsedWithBuffer < resourcesAllocatedForUse.doubleValue()) {
+ resourcesAllocatedForUse.doubleValue() - resourcesActuallyUsedWithBuffer
+ } else {
+ 0.0
+ }
+ // "Allocated" is the total resource reserved from the cluster.
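+ // A worked example with hypothetical numbers (not from this patch): 10 executors of
+ // 4096 MB for a 600 s application allocate 10 * 4096 * 600 = 24,576,000 MB-seconds;
+ // if those executors only ran tasks for a combined 2,000 s, resourcesActuallyUsed is
+ // 4096 * 2000 = 8,192,000 MB-seconds, the default 50% buffer raises that to
+ // 12,288,000, and the remaining 12,288,000 MB-seconds count as wasted.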
+ if (resourcesAllocatedForUse.isValidLong) {
+ hadoopAggregatedData.setResourceUsed(resourcesAllocatedForUse.toLong)
+ } else {
+ logger.info(s"resourcesAllocatedForUse exceeds Long.MaxValue: ${resourcesAllocatedForUse}")
+ }
+
+ hadoopAggregatedData.setResourceWasted(resourcesWastedMBSeconds.toLong)
+ }
+
+ private def aggregateResourcesActuallyUsed(executorMemoryBytes: Long, totalExecutorTaskTimeMillis: BigInt): BigInt = {
+ val bytesMillis = BigInt(executorMemoryBytes) * totalExecutorTaskTimeMillis
+ (bytesMillis / (BigInt(FileUtils.ONE_MB) * BigInt(Statistics.SECOND_IN_MS)))
+ }
+
+ private def aggregateResourcesAllocatedForUse(
+ executorInstances: Int,
+ executorMemoryBytes: Long,
+ applicationDurationMillis: Long
+ ): BigInt = {
+ val bytesMillis = BigInt(executorInstances) * BigInt(executorMemoryBytes) * BigInt(applicationDurationMillis)
+ (bytesMillis / (BigInt(FileUtils.ONE_MB) * BigInt(Statistics.SECOND_IN_MS)))
+ }
+
+ private def executorInstancesOf(data: SparkApplicationData): Option[Int] = {
+ val appConfigurationProperties = data.appConfigurationProperties
+ appConfigurationProperties.get(SPARK_EXECUTOR_INSTANCES_KEY).map(_.toInt)
+ }
+
+ private def executorMemoryBytesOf(data: SparkApplicationData): Option[Long] = {
+ val appConfigurationProperties = data.appConfigurationProperties
+ appConfigurationProperties.get(SPARK_EXECUTOR_MEMORY_KEY).map(MemoryFormatUtils.stringToBytes)
+ }
+
+ private def applicationDurationMillisOf(data: SparkApplicationData): Long = {
+ require(data.applicationInfo.attempts.nonEmpty)
+ val lastApplicationAttemptInfo = data.applicationInfo.attempts.last
+ lastApplicationAttemptInfo.endTime.getTime - lastApplicationAttemptInfo.startTime.getTime
+ }
+
+ private def totalExecutorTaskTimeMillisOf(data: SparkApplicationData): BigInt = {
+ data.executorSummaries.map { executorSummary => BigInt(executorSummary.totalDuration) }.sum
+ }
+}
+
+object SparkMetricsAggregator {
+ /** The percentage of allocated memory we expect to waste because of overhead. */
+ val DEFAULT_ALLOCATED_MEMORY_WASTE_BUFFER_PERCENTAGE = 0.5D
+
+ val ALLOCATED_MEMORY_WASTE_BUFFER_PERCENTAGE_KEY = "allocated_memory_waste_buffer_percentage"
+
+ val SPARK_EXECUTOR_INSTANCES_KEY = "spark.executor.instances"
+ val SPARK_EXECUTOR_MEMORY_KEY = "spark.executor.memory"
+}
diff --git a/app/com/linkedin/drelephant/spark/data/SparkApplicationData.scala b/app/com/linkedin/drelephant/spark/data/SparkApplicationData.scala
new file mode 100644
index 000000000..6e6ac5972
--- /dev/null
+++ b/app/com/linkedin/drelephant/spark/data/SparkApplicationData.scala
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */ + +package com.linkedin.drelephant.spark.data + +import java.util.Properties + +import scala.collection.JavaConverters + +import com.linkedin.drelephant.analysis.{ApplicationType, HadoopApplicationData} +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, ExecutorSummary, JobData, StageData} + + +case class SparkApplicationData( + appId: String, + appConfigurationProperties: Map[String, String], + applicationInfo: ApplicationInfo, + jobDatas: Seq[JobData], + stageDatas: Seq[StageData], + executorSummaries: Seq[ExecutorSummary] +) extends HadoopApplicationData { + import SparkApplicationData._ + import JavaConverters._ + + override def getApplicationType(): ApplicationType = APPLICATION_TYPE + + override def getConf(): Properties = { + val properties = new Properties() + properties.putAll(appConfigurationProperties.asJava) + properties + } + + override def getAppId(): String = appId + + // This instance will always have data, or at least the data the Spark REST API gives us. + override def isEmpty(): Boolean = false +} + +object SparkApplicationData { + val APPLICATION_TYPE = new ApplicationType("SPARK") + + def apply( + appId: String, + restDerivedData: SparkRestDerivedData, + logDerivedData: Option[SparkLogDerivedData] + ): SparkApplicationData = { + val appConfigurationProperties: Map[String, String] = + logDerivedData + .flatMap { _.environmentUpdate.environmentDetails.get("Spark Properties").map(_.toMap) } + .getOrElse(Map.empty) + val applicationInfo = restDerivedData.applicationInfo + val jobDatas = restDerivedData.jobDatas + val stageDatas = restDerivedData.stageDatas + val executorSummaries = restDerivedData.executorSummaries + apply(appId, appConfigurationProperties, applicationInfo, jobDatas, stageDatas, executorSummaries) + } +} diff --git a/app/com/linkedin/drelephant/spark/data/SparkLogDerivedData.scala b/app/com/linkedin/drelephant/spark/data/SparkLogDerivedData.scala new file mode 100644 index 000000000..537234a5c --- /dev/null +++ b/app/com/linkedin/drelephant/spark/data/SparkLogDerivedData.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.data + +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate + + +case class SparkLogDerivedData(environmentUpdate: SparkListenerEnvironmentUpdate) { + def appConfigurationProperties: Map[String, String] = environmentUpdate.environmentDetails("Spark Properties").toMap +} diff --git a/app/com/linkedin/drelephant/spark/data/SparkRestDerivedData.scala b/app/com/linkedin/drelephant/spark/data/SparkRestDerivedData.scala new file mode 100644 index 000000000..721c4f3c9 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/data/SparkRestDerivedData.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.data + +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, ExecutorSummary, JobData, StageData} + + +case class SparkRestDerivedData( + applicationInfo: ApplicationInfo, + jobDatas: Seq[JobData], + stageDatas: Seq[StageData], + executorSummaries: Seq[ExecutorSummary] +) diff --git a/app/com/linkedin/drelephant/spark/fetchers/SparkFetcher.scala b/app/com/linkedin/drelephant/spark/fetchers/SparkFetcher.scala new file mode 100644 index 000000000..0cf703f23 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/fetchers/SparkFetcher.scala @@ -0,0 +1,101 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.fetchers + +import scala.async.Async +import scala.concurrent.{Await, ExecutionContext, Future} +import scala.concurrent.duration.{Duration, SECONDS} +import scala.util.Try +import scala.util.control.NonFatal + +import com.linkedin.drelephant.analysis.{AnalyticJob, ElephantFetcher} +import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData +import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData} +import com.linkedin.drelephant.util.SparkUtils +import org.apache.hadoop.conf.Configuration +import org.apache.log4j.Logger +import org.apache.spark.SparkConf + + +/** + * A fetcher that gets Spark-related data from a combination of the Spark monitoring REST API and Spark event logs. 
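+ *
+ * The REST endpoint is resolved from spark.yarn.historyServer.address in the Spark conf,
+ * and the event logs are only consulted when spark.eventLog.enabled is set to true.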
+ */ +class SparkFetcher(fetcherConfigurationData: FetcherConfigurationData) + extends ElephantFetcher[SparkApplicationData] { + import SparkFetcher._ + import ExecutionContext.Implicits.global + + private val logger: Logger = Logger.getLogger(classOf[SparkFetcher]) + + private[fetchers] lazy val hadoopConfiguration: Configuration = new Configuration() + + private[fetchers] lazy val sparkUtils: SparkUtils = SparkUtils + + private[fetchers] lazy val sparkConf: SparkConf = { + val sparkConf = new SparkConf() + sparkUtils.getDefaultPropertiesFile(sparkUtils.defaultEnv) match { + case Some(filename) => sparkConf.setAll(sparkUtils.getPropertiesFromFile(filename)) + case None => throw new IllegalStateException("can't find Spark conf; please set SPARK_HOME or SPARK_CONF_DIR") + } + sparkConf + } + + private[fetchers] lazy val sparkRestClient: SparkRestClient = new SparkRestClient(sparkConf) + + private[fetchers] lazy val sparkLogClient: Option[SparkLogClient] = { + val eventLogEnabled = sparkConf.getBoolean(SPARK_EVENT_LOG_ENABLED_KEY, false) + if (eventLogEnabled) Some(new SparkLogClient(hadoopConfiguration, sparkConf)) else None + } + + override def fetchData(analyticJob: AnalyticJob): SparkApplicationData = { + val appId = analyticJob.getAppId + logger.info(s"Fetching data for ${appId}") + try { + Await.result(doFetchData(sparkRestClient, sparkLogClient, appId), DEFAULT_TIMEOUT) + } catch { + case NonFatal(e) => + logger.error(s"Failed fetching data for ${appId}", e) + throw e + } + } +} + +object SparkFetcher { + import Async.{async, await} + + val SPARK_EVENT_LOG_ENABLED_KEY = "spark.eventLog.enabled" + val DEFAULT_TIMEOUT = Duration(30, SECONDS) + + private def doFetchData( + sparkRestClient: SparkRestClient, + sparkLogClient: Option[SparkLogClient], + appId: String + )( + implicit ec: ExecutionContext + ): Future[SparkApplicationData] = async { + val restDerivedData = await(sparkRestClient.fetchData(appId)) + val lastAttemptId = restDerivedData.applicationInfo.attempts.maxBy { _.startTime }.attemptId + + // Would use .map but await doesn't like that construction. + val logDerivedData = sparkLogClient match { + case Some(sparkLogClient) => Some(await(sparkLogClient.fetchData(appId, lastAttemptId))) + case None => None + } + + SparkApplicationData(appId, restDerivedData, logDerivedData) + } +} diff --git a/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala b/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala new file mode 100644 index 000000000..75fcbff78 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala @@ -0,0 +1,237 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.fetchers + +import java.io.{BufferedInputStream, FileNotFoundException, InputStream} +import java.net.URI + +import scala.async.Async +import scala.collection.mutable.HashMap +import scala.concurrent.{ExecutionContext, Future} +import scala.io.Source + +import com.linkedin.drelephant.spark.data.SparkLogDerivedData +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.{FileSystem, Path} +import org.apache.log4j.Logger +import org.apache.spark.SparkConf +import org.apache.spark.io.{CompressionCodec, LZ4CompressionCodec, LZFCompressionCodec, SnappyCompressionCodec} +import org.apache.spark.scheduler.{SparkListenerEnvironmentUpdate, SparkListenerEvent} +import org.json4s.{DefaultFormats, JsonAST} +import org.json4s.jackson.JsonMethods + + +/** + * A client for getting data from the Spark event logs, using the location configured for spark.eventLog.dir. + * + * This client uses webhdfs to access the location, even if spark.eventLog.dir is provided as an hdfs URL. + * + * The codecs used by this client use JNI, which results in some weird classloading issues (at least when testing in the console), + * so some of the client's implementation is non-lazy or synchronous when needed. + */ +class SparkLogClient(hadoopConfiguration: Configuration, sparkConf: SparkConf) { + import SparkLogClient._ + import Async.async + + private val logger: Logger = Logger.getLogger(classOf[SparkLogClient]) + + private[fetchers] val webhdfsEventLogUri: URI = { + val eventLogUri = sparkConf.getOption(SPARK_EVENT_LOG_DIR_KEY).map(new URI(_)) + val dfsNamenodeHttpAddress = Option(hadoopConfiguration.get(HADOOP_DFS_NAMENODE_HTTP_ADDRESS_KEY)) + (eventLogUri, dfsNamenodeHttpAddress) match { + case (Some(eventLogUri), _) if eventLogUri.getScheme == "webhdfs" => + eventLogUri + case (Some(eventLogUri), Some(dfsNamenodeHttpAddress)) if eventLogUri.getScheme == "hdfs" => + val dfsNamenodeHttpUri = new URI(null, dfsNamenodeHttpAddress, null, null, null) + new URI(s"webhdfs://${eventLogUri.getHost}:${dfsNamenodeHttpUri.getPort}${eventLogUri.getPath}") + case _ => + throw new IllegalArgumentException( + s"""|${SPARK_EVENT_LOG_DIR_KEY} must be provided as webhdfs:// or hdfs://; + |if hdfs, ${HADOOP_DFS_NAMENODE_HTTP_ADDRESS_KEY} must also be provided for port""".stripMargin.replaceAll("\n", " ") + ) + } + } + + private[fetchers] lazy val fs: FileSystem = FileSystem.get(webhdfsEventLogUri, hadoopConfiguration) + + private lazy val shouldCompress = sparkConf.getBoolean("spark.eventLog.compress", defaultValue = false) + private lazy val compressionCodec = if (shouldCompress) Some(compressionCodecFromConf(sparkConf)) else None + private lazy val compressionCodecShortName = compressionCodec.map(shortNameOfCompressionCodec) + + def fetchData(appId: String, attemptId: Option[String])(implicit ec: ExecutionContext): Future[SparkLogDerivedData] = { + val logPath = getLogPath(webhdfsEventLogUri, appId, attemptId, compressionCodecShortName) + logger.info(s"looking for logs at ${logPath}") + + val codec = compressionCodecForLogPath(sparkConf, logPath) + + // Limit scope of async. 
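+ // A sketch of what happens below: scala-arm's resource.managed opens the
+ // (possibly compressed) event log via openEventLog, the optional codec decompresses
+ // it, and findDerivedData scans the stream for the SparkListenerEnvironmentUpdate
+ // event before the stream is closed.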
+ async { + resource.managed { openEventLog(sparkConf, logPath, fs) } + .acquireAndGet { in => findDerivedData(codec.map { _.compressedInputStream(in) }.getOrElse(in)) } + } + } +} + +object SparkLogClient { + import JsonAST._ + + val SPARK_EVENT_LOG_DIR_KEY = "spark.eventLog.dir" + val HADOOP_DFS_NAMENODE_HTTP_ADDRESS_KEY = "dfs.namenode.http-address" + + private implicit val formats: DefaultFormats = DefaultFormats + + def findDerivedData(in: InputStream, eventsLimit: Option[Int] = None): SparkLogDerivedData = { + val events = eventsLimit.map { getEvents(in).take(_) }.getOrElse { getEvents(in) } + + var environmentUpdate: Option[SparkListenerEnvironmentUpdate] = None + while (events.hasNext && environmentUpdate.isEmpty) { + val event = events.next + event match { + case Some(eu: SparkListenerEnvironmentUpdate) => environmentUpdate = Some(eu) + case _ => {} // Do nothing. + } + } + + environmentUpdate + .map(SparkLogDerivedData(_)) + .getOrElse { throw new IllegalArgumentException("Spark event log doesn't have Spark properties") } + } + + private def getEvents(in: InputStream): Iterator[Option[SparkListenerEvent]] = getLines(in).map(lineToEvent) + + private def getLines(in: InputStream): Iterator[String] = Source.fromInputStream(in).getLines + + private def lineToEvent(line: String): Option[SparkListenerEvent] = sparkEventFromJson(JsonMethods.parse(line)) + + // Below this line are modified utility methods from: + // + // https://github.com/apache/spark/blob/v1.4.1/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala + // https://github.com/apache/spark/blob/v1.4.1/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala + // https://github.com/apache/spark/blob/v1.4.1/core/src/main/scala/org/apache/spark/util/Utils.scala + + private val IN_PROGRESS = ".inprogress" + private val DEFAULT_COMPRESSION_CODEC = "snappy" + + private val compressionCodecClassNamesByShortName = Map( + "lz4" -> classOf[LZ4CompressionCodec].getName, + "lzf" -> classOf[LZFCompressionCodec].getName, + "snappy" -> classOf[SnappyCompressionCodec].getName + ) + + // A cache for compression codecs to avoid creating the same codec many times + private val compressionCodecMap = HashMap.empty[String, CompressionCodec] + + private def compressionCodecFromConf(conf: SparkConf): CompressionCodec = { + val codecName = conf.get("spark.io.compression.codec", DEFAULT_COMPRESSION_CODEC) + loadCompressionCodec(conf, codecName) + } + + private def loadCompressionCodec(conf: SparkConf, codecName: String): CompressionCodec = { + val codecClass = compressionCodecClassNamesByShortName.getOrElse(codecName.toLowerCase, codecName) + val classLoader = Option(Thread.currentThread().getContextClassLoader).getOrElse(getClass.getClassLoader) + val codec = try { + val ctor = Class.forName(codecClass, true, classLoader).getConstructor(classOf[SparkConf]) + Some(ctor.newInstance(conf).asInstanceOf[CompressionCodec]) + } catch { + case e: ClassNotFoundException => None + case e: IllegalArgumentException => None + } + codec.getOrElse(throw new IllegalArgumentException(s"Codec [$codecName] is not available. 
")) + } + + private def shortNameOfCompressionCodec(compressionCodec: CompressionCodec): String = { + val codecName = compressionCodec.getClass.getName + if (compressionCodecClassNamesByShortName.contains(codecName)) { + codecName + } else { + compressionCodecClassNamesByShortName + .collectFirst { case (k, v) if v == codecName => k } + .getOrElse { throw new IllegalArgumentException(s"No short name for codec $codecName.") } + } + } + + private def getLogPath( + logBaseDir: URI, + appId: String, + appAttemptId: Option[String], + compressionCodecName: Option[String] = None + ): Path = { + val base = logBaseDir.toString.stripSuffix("/") + "/" + sanitize(appId) + val codec = compressionCodecName.map("." + _).getOrElse("") + if (appAttemptId.isDefined) { + new Path(base + "_" + sanitize(appAttemptId.get) + codec) + } else { + new Path(base + codec) + } + } + + private def openEventLog(conf: SparkConf, logPath: Path, fs: FileSystem): InputStream = { + // It's not clear whether FileSystem.open() throws FileNotFoundException or just plain + // IOException when a file does not exist, so try our best to throw a proper exception. + if (!fs.exists(logPath)) { + throw new FileNotFoundException(s"File ${logPath} does not exist.") + } + + new BufferedInputStream(fs.open(logPath)) + } + + private def compressionCodecForLogPath(conf: SparkConf, logPath: Path): Option[CompressionCodec] = { + // Compression codec is encoded as an extension, e.g. app_123.lzf + // Since we sanitize the app ID to not include periods, it is safe to split on it + val logBaseName = logPath.getName.stripSuffix(IN_PROGRESS) + logBaseName.split("\\.").tail.lastOption.map { codecName => + compressionCodecMap.getOrElseUpdate(codecName, loadCompressionCodec(conf, codecName)) + } + } + + private def sanitize(str: String): String = { + str.replaceAll("[ :/]", "-").replaceAll("[.${}'\"]", "_").toLowerCase + } + + private def sparkEventFromJson(json: JValue): Option[SparkListenerEvent] = { + val environmentUpdate = getFormattedClassName(SparkListenerEnvironmentUpdate) + + (json \ "Event").extract[String] match { + case `environmentUpdate` => Some(environmentUpdateFromJson(json)) + case _ => None + } + } + + private def getFormattedClassName(obj: AnyRef): String = obj.getClass.getSimpleName.replace("$", "") + + private def environmentUpdateFromJson(json: JValue): SparkListenerEnvironmentUpdate = { + val environmentDetails = Map[String, Seq[(String, String)]]( + "JVM Information" -> mapFromJson(json \ "JVM Information").toSeq, + "Spark Properties" -> mapFromJson(json \ "Spark Properties").toSeq, + "System Properties" -> mapFromJson(json \ "System Properties").toSeq, + "Classpath Entries" -> mapFromJson(json \ "Classpath Entries").toSeq) + SparkListenerEnvironmentUpdate(environmentDetails) + } + + private def mapFromJson(json: JValue): Map[String, String] = { + val jsonFields = json.asInstanceOf[JObject].obj + jsonFields.map { case JField(k, JString(v)) => (k, v) }.toMap + } + + /** Return an option that translates JNothing to None */ + private def jsonOption(json: JValue): Option[JValue] = { + json match { + case JNothing => None + case value: JValue => Some(value) + } + } +} diff --git a/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala b/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala new file mode 100644 index 000000000..584748916 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala @@ -0,0 +1,161 @@ +/* + * Copyright 2016 LinkedIn Corp. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.spark.fetchers
+
+import java.net.URI
+import java.text.SimpleDateFormat
+import java.util.{Calendar, SimpleTimeZone}
+
+import scala.async.Async
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.control.NonFatal
+
+import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
+import com.linkedin.drelephant.spark.data.SparkRestDerivedData
+import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfo, ApplicationInfo, ExecutorSummary, JobData, StageData}
+import javax.ws.rs.client.{Client, ClientBuilder, WebTarget}
+import javax.ws.rs.core.MediaType
+import org.apache.log4j.Logger
+import org.apache.spark.SparkConf
+
+
+/**
+ * A client for getting data from the Spark monitoring REST API, i.e. the history server's api/v1 endpoints.
+ *
+ * Jersey classloading seems to be brittle (at least when testing in the console), so some of the implementation is non-lazy
+ * or synchronous when needed.
+ */
+class SparkRestClient(sparkConf: SparkConf) {
+ import SparkRestClient._
+ import Async.{async, await}
+
+ private val logger: Logger = Logger.getLogger(classOf[SparkRestClient])
+
+ private val client: Client = ClientBuilder.newClient()
+
+ private val historyServerUri: URI = sparkConf.getOption(HISTORY_SERVER_ADDRESS_KEY) match {
+ case Some(historyServerAddress) =>
+ val baseUri: URI =
+ // Latest versions of CDH include http in their history server address configuration.
+ // However, it is not recommended by the Spark documentation (http://spark.apache.org/docs/latest/running-on-yarn.html).
+ if (historyServerAddress.contains("http://")) {
+ new URI(historyServerAddress)
+ } else {
+ new URI(s"http://${historyServerAddress}")
+ }
+ require(baseUri.getPath == "")
+ baseUri
+ case None =>
+ throw new IllegalArgumentException("spark.yarn.historyServer.address not provided; can't use Spark REST API")
+ }
+
+ private val apiTarget: WebTarget = client.target(historyServerUri).path(API_V1_MOUNT_PATH)
+
+ def fetchData(appId: String)(implicit ec: ExecutionContext): Future[SparkRestDerivedData] = {
+ val appTarget = apiTarget.path(s"applications/${appId}")
+ logger.info(s"calling REST API at ${appTarget.getUri}")
+
+ val applicationInfo = getApplicationInfo(appTarget)
+
+ // Limit scope of async.
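+ // The nested async blocks below issue the jobs, stages, and executors requests
+ // concurrently against the last attempt's REST path; the awaits then join all
+ // three results into a single SparkRestDerivedData.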
+ async { + val lastAttemptId = applicationInfo.attempts.maxBy {_.startTime}.attemptId + val attemptTarget = lastAttemptId.map(appTarget.path).getOrElse(appTarget) + val futureJobDatas = async { getJobDatas(attemptTarget) } + val futureStageDatas = async { getStageDatas(attemptTarget) } + val futureExecutorSummaries = async { getExecutorSummaries(attemptTarget) } + SparkRestDerivedData( + applicationInfo, + await(futureJobDatas), + await(futureStageDatas), + await(futureExecutorSummaries) + ) + } + } + + private def getApplicationInfo(appTarget: WebTarget): ApplicationInfo = { + try { + get(appTarget, SparkRestObjectMapper.readValue[ApplicationInfo]) + } catch { + case NonFatal(e) => { + logger.error(s"error reading applicationInfo ${appTarget.getUri}", e) + throw e + } + } + } + + private def getJobDatas(attemptTarget: WebTarget): Seq[JobData] = { + val target = attemptTarget.path("jobs") + try { + get(target, SparkRestObjectMapper.readValue[Seq[JobData]]) + } catch { + case NonFatal(e) => { + logger.error(s"error reading jobData ${target.getUri}", e) + throw e + } + } + } + + private def getStageDatas(attemptTarget: WebTarget): Seq[StageData] = { + val target = attemptTarget.path("stages") + try { + get(target, SparkRestObjectMapper.readValue[Seq[StageData]]) + } catch { + case NonFatal(e) => { + logger.error(s"error reading stageData ${target.getUri}", e) + throw e + } + } + } + + private def getExecutorSummaries(attemptTarget: WebTarget): Seq[ExecutorSummary] = { + val target = attemptTarget.path("executors") + try { + get(target, SparkRestObjectMapper.readValue[Seq[ExecutorSummary]]) + } catch { + case NonFatal(e) => { + logger.error(s"error reading executorSummary ${target.getUri}", e) + throw e + } + } + } +} + +object SparkRestClient { + val HISTORY_SERVER_ADDRESS_KEY = "spark.yarn.historyServer.address" + val API_V1_MOUNT_PATH = "api/v1" + + val SparkRestObjectMapper = { + val dateFormat = { + val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'") + val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) + iso8601.setCalendar(cal) + iso8601 + } + + val objectMapper = new ObjectMapper() with ScalaObjectMapper + objectMapper.setDateFormat(dateFormat) + objectMapper.registerModule(DefaultScalaModule) + objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + objectMapper + } + + def get[T](webTarget: WebTarget, converter: String => T): T = + converter(webTarget.request(MediaType.APPLICATION_JSON).get(classOf[String])) +} diff --git a/app/com/linkedin/drelephant/spark/fetchers/statusapiv1/statusapiv1.scala b/app/com/linkedin/drelephant/spark/fetchers/statusapiv1/statusapiv1.scala new file mode 100644 index 000000000..d586d4b0f --- /dev/null +++ b/app/com/linkedin/drelephant/spark/fetchers/statusapiv1/statusapiv1.scala @@ -0,0 +1,250 @@ +/* + * Originally from + * https://github.com/apache/spark/blob/v1.4.1/core/src/main/scala/org/apache/spark/status/api/v1/api.scala + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright 2016 LinkedIn Corp. + * + * To keep up to date, please copy + * https://github.com/apache/spark/blob/v1.4.1/core/src/main/scala/org/apache/spark/status/api/v1/api.scala + * and maintain in this package. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package com.linkedin.drelephant.spark.fetchers.statusapiv1 + +import java.util.Date + +import scala.collection.Map + +import org.apache.spark.JobExecutionStatus +import org.apache.spark.status.api.v1.StageStatus + +class ApplicationInfo( + val id: String, + val name: String, + val attempts: Seq[ApplicationAttemptInfo]) + +class ApplicationAttemptInfo( + val attemptId: Option[String], + val startTime: Date, + val endTime: Date, + val sparkUser: String, + val completed: Boolean = false) + +class ExecutorStageSummary( + val taskTime : Long, + val failedTasks : Int, + val succeededTasks : Int, + val inputBytes : Long, + val outputBytes : Long, + val shuffleRead : Long, + val shuffleWrite : Long, + val memoryBytesSpilled : Long, + val diskBytesSpilled : Long) + +class ExecutorSummary( + val id: String, + val hostPort: String, + val rddBlocks: Int, + val memoryUsed: Long, + val diskUsed: Long, + val activeTasks: Int, + val failedTasks: Int, + val completedTasks: Int, + val totalTasks: Int, + val totalDuration: Long, + val totalInputBytes: Long, + val totalShuffleRead: Long, + val totalShuffleWrite: Long, + val maxMemory: Long, + val executorLogs: Map[String, String]) + +class JobData( + val jobId: Int, + val name: String, + val description: Option[String], + val submissionTime: Option[Date], + val completionTime: Option[Date], + val stageIds: Seq[Int], + val jobGroup: Option[String], + val status: JobExecutionStatus, + val numTasks: Int, + val numActiveTasks: Int, + val numCompletedTasks: Int, + val numSkippedTasks: Int, + val numFailedTasks: Int, + val numActiveStages: Int, + val numCompletedStages: Int, + val numSkippedStages: Int, + val numFailedStages: Int) + +// Q: should Tachyon size go in here as well? currently the UI only shows it on the overall storage +// page ... does anybody pay attention to it? 
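+// These classes are plain deserialization targets for the history server's JSON. As a sketch
+// (the payload below is hypothetical and abbreviated), SparkRestClient's object mapper binds
+// JSON onto them like so:
+//
+//   val json = """{"id": "application_1_0001", "name": "myApp", "attempts": []}"""
+//   val info = SparkRestClient.SparkRestObjectMapper.readValue[ApplicationInfo](json)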
+class RDDStorageInfo( + val id: Int, + val name: String, + val numPartitions: Int, + val numCachedPartitions: Int, + val storageLevel: String, + val memoryUsed: Long, + val diskUsed: Long, + val dataDistribution: Option[Seq[RDDDataDistribution]], + val partitions: Option[Seq[RDDPartitionInfo]]) + +class RDDDataDistribution( + val address: String, + val memoryUsed: Long, + val memoryRemaining: Long, + val diskUsed: Long) + +class RDDPartitionInfo( + val blockName: String, + val storageLevel: String, + val memoryUsed: Long, + val diskUsed: Long, + val executors: Seq[String]) + +class StageData( + val status: StageStatus, + val stageId: Int, + val attemptId: Int, + val numActiveTasks: Int , + val numCompleteTasks: Int, + val numFailedTasks: Int, + + val executorRunTime: Long, + + val inputBytes: Long, + val inputRecords: Long, + val outputBytes: Long, + val outputRecords: Long, + val shuffleReadBytes: Long, + val shuffleReadRecords: Long, + val shuffleWriteBytes: Long, + val shuffleWriteRecords: Long, + val memoryBytesSpilled: Long, + val diskBytesSpilled: Long, + + val name: String, + val details: String, + val schedulingPool: String, + + val accumulatorUpdates: Seq[AccumulableInfo], + val tasks: Option[Map[Long, TaskData]], + val executorSummary: Option[Map[String, ExecutorStageSummary]]) + +class TaskData( + val taskId: Long, + val index: Int, + val attempt: Int, + val launchTime: Date, + val executorId: String, + val host: String, + val taskLocality: String, + val speculative: Boolean, + val accumulatorUpdates: Seq[AccumulableInfo], + val errorMessage: Option[String] = None, + val taskMetrics: Option[TaskMetrics] = None) + +class TaskMetrics( + val executorDeserializeTime: Long, + val executorRunTime: Long, + val resultSize: Long, + val jvmGcTime: Long, + val resultSerializationTime: Long, + val memoryBytesSpilled: Long, + val diskBytesSpilled: Long, + val inputMetrics: Option[InputMetrics], + val outputMetrics: Option[OutputMetrics], + val shuffleReadMetrics: Option[ShuffleReadMetrics], + val shuffleWriteMetrics: Option[ShuffleWriteMetrics]) + +class InputMetrics( + val bytesRead: Long, + val recordsRead: Long) + +class OutputMetrics( + val bytesWritten: Long, + val recordsWritten: Long) + +class ShuffleReadMetrics( + val remoteBlocksFetched: Int, + val localBlocksFetched: Int, + val fetchWaitTime: Long, + val remoteBytesRead: Long, + val totalBlocksFetched: Int, + val recordsRead: Long) + +class ShuffleWriteMetrics( + val bytesWritten: Long, + val writeTime: Long, + val recordsWritten: Long) + +class TaskMetricDistributions( + val quantiles: IndexedSeq[Double], + + val executorDeserializeTime: IndexedSeq[Double], + val executorRunTime: IndexedSeq[Double], + val resultSize: IndexedSeq[Double], + val jvmGcTime: IndexedSeq[Double], + val resultSerializationTime: IndexedSeq[Double], + val memoryBytesSpilled: IndexedSeq[Double], + val diskBytesSpilled: IndexedSeq[Double], + + val inputMetrics: Option[InputMetricDistributions], + val outputMetrics: Option[OutputMetricDistributions], + val shuffleReadMetrics: Option[ShuffleReadMetricDistributions], + val shuffleWriteMetrics: Option[ShuffleWriteMetricDistributions]) + +class InputMetricDistributions( + val bytesRead: IndexedSeq[Double], + val recordsRead: IndexedSeq[Double]) + +class OutputMetricDistributions( + val bytesWritten: IndexedSeq[Double], + val recordsWritten: IndexedSeq[Double]) + +class ShuffleReadMetricDistributions( + val readBytes: IndexedSeq[Double], + val readRecords: IndexedSeq[Double], + val remoteBlocksFetched: 
IndexedSeq[Double], + val localBlocksFetched: IndexedSeq[Double], + val fetchWaitTime: IndexedSeq[Double], + val remoteBytesRead: IndexedSeq[Double], + val totalBlocksFetched: IndexedSeq[Double]) + +class ShuffleWriteMetricDistributions( + val writeBytes: IndexedSeq[Double], + val writeRecords: IndexedSeq[Double], + val writeTime: IndexedSeq[Double]) + +class AccumulableInfo( + val id: Long, + val name: String, + val update: Option[String], + val value: String) diff --git a/app/com/linkedin/drelephant/spark/heuristics/ConfigurationHeuristic.scala b/app/com/linkedin/drelephant/spark/heuristics/ConfigurationHeuristic.scala new file mode 100644 index 000000000..6c8f7a351 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/heuristics/ConfigurationHeuristic.scala @@ -0,0 +1,145 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.heuristics + +import com.linkedin.drelephant.math.Statistics + +import scala.collection.JavaConverters +import scala.util.Try + +import com.linkedin.drelephant.analysis.{HeuristicResultDetails, Heuristic, HeuristicResult, Severity} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.spark.data.SparkApplicationData +import com.linkedin.drelephant.util.MemoryFormatUtils + + +/** + * A heuristic based on an app's known configuration. + * + * The results from this heuristic primarily inform users about key app configuration settings, including + * driver memory, executor cores, executor instances, executor memory, and the serializer. + */ +class ConfigurationHeuristic(private val heuristicConfigurationData: HeuristicConfigurationData) + extends Heuristic[SparkApplicationData] { + import ConfigurationHeuristic._ + import JavaConverters._ + + val serializerIfNonNullRecommendation: String = + Option(heuristicConfigurationData.getParamMap.get(SERIALIZER_IF_NON_NULL_RECOMMENDATION_KEY)) + .getOrElse(DEFAULT_SERIALIZER_IF_NON_NULL_RECOMMENDATION) + + val serializerIfNonNullSeverityIfRecommendationUnmet: Severity = + DEFAULT_SERIALIZER_IF_NON_NULL_SEVERITY_IF_RECOMMENDATION_UNMET + + override def getHeuristicConfData(): HeuristicConfigurationData = heuristicConfigurationData + + override def apply(data: SparkApplicationData): HeuristicResult = { + val evaluator = new Evaluator(this, data) + + def formatProperty(property: Option[String]): String = + property.getOrElse("Not presented. 
Using default.") + + val resultDetails = Seq( + new HeuristicResultDetails( + SPARK_DRIVER_MEMORY_KEY, + formatProperty(evaluator.driverMemoryBytes.map(MemoryFormatUtils.bytesToString)) + ), + new HeuristicResultDetails( + SPARK_EXECUTOR_MEMORY_KEY, + formatProperty(evaluator.executorMemoryBytes.map(MemoryFormatUtils.bytesToString)) + ), + new HeuristicResultDetails( + SPARK_EXECUTOR_INSTANCES_KEY, + formatProperty(evaluator.executorInstances.map(_.toString)) + ), + new HeuristicResultDetails( + SPARK_EXECUTOR_CORES_KEY, + formatProperty(evaluator.executorCores.map(_.toString)) + ), + new HeuristicResultDetails( + SPARK_APPLICATION_DURATION, + evaluator.applicationDuration.toString + " Seconds" + ), + new HeuristicResultDetails( + SPARK_SERIALIZER_KEY, + formatProperty(evaluator.serializer) + ) + ) + val result = new HeuristicResult( + heuristicConfigurationData.getClassName, + heuristicConfigurationData.getHeuristicName, + evaluator.severity, + 0, + resultDetails.asJava + ) + result + } +} + +object ConfigurationHeuristic { + val DEFAULT_SERIALIZER_IF_NON_NULL_RECOMMENDATION = "org.apache.spark.serializer.KryoSerializer" + val DEFAULT_SERIALIZER_IF_NON_NULL_SEVERITY_IF_RECOMMENDATION_UNMET = Severity.MODERATE + + val SERIALIZER_IF_NON_NULL_RECOMMENDATION_KEY = "serializer_if_non_null_recommendation" + + val SPARK_DRIVER_MEMORY_KEY = "spark.driver.memory" + val SPARK_EXECUTOR_MEMORY_KEY = "spark.executor.memory" + val SPARK_EXECUTOR_INSTANCES_KEY = "spark.executor.instances" + val SPARK_EXECUTOR_CORES_KEY = "spark.executor.cores" + val SPARK_SERIALIZER_KEY = "spark.serializer" + val SPARK_APPLICATION_DURATION = "spark.application.duration" + + class Evaluator(configurationHeuristic: ConfigurationHeuristic, data: SparkApplicationData) { + lazy val appConfigurationProperties: Map[String, String] = + data.appConfigurationProperties + + lazy val driverMemoryBytes: Option[Long] = + Try(getProperty(SPARK_DRIVER_MEMORY_KEY).map(MemoryFormatUtils.stringToBytes)).getOrElse(None) + + lazy val executorMemoryBytes: Option[Long] = + Try(getProperty(SPARK_EXECUTOR_MEMORY_KEY).map(MemoryFormatUtils.stringToBytes)).getOrElse(None) + + lazy val executorInstances: Option[Int] = + Try(getProperty(SPARK_EXECUTOR_INSTANCES_KEY).map(_.toInt)).getOrElse(None) + + lazy val executorCores: Option[Int] = + Try(getProperty(SPARK_EXECUTOR_CORES_KEY).map(_.toInt)).getOrElse(None) + + lazy val applicationDuration : Long = { + require(data.applicationInfo.attempts.nonEmpty) + val lastApplicationAttemptInfo = data.applicationInfo.attempts.last + (lastApplicationAttemptInfo.endTime.getTime - lastApplicationAttemptInfo.startTime.getTime) / Statistics.SECOND_IN_MS + } + + lazy val serializer: Option[String] = getProperty(SPARK_SERIALIZER_KEY) + + lazy val serializerSeverity: Severity = serializer match { + case None => Severity.NONE + case Some(`serializerIfNonNullRecommendation`) => Severity.NONE + case Some(_) => serializerIfNonNullSeverityIfRecommendationUnmet + } + + lazy val severity: Severity = serializerSeverity + + private val serializerIfNonNullRecommendation: String = configurationHeuristic.serializerIfNonNullRecommendation + + private val serializerIfNonNullSeverityIfRecommendationUnmet: Severity = + configurationHeuristic.serializerIfNonNullSeverityIfRecommendationUnmet + + private def getProperty(key: String): Option[String] = appConfigurationProperties.get(key) + } +} diff --git a/app/com/linkedin/drelephant/spark/heuristics/ExecutorsHeuristic.scala 
b/app/com/linkedin/drelephant/spark/heuristics/ExecutorsHeuristic.scala new file mode 100644 index 000000000..dae604124 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/heuristics/ExecutorsHeuristic.scala @@ -0,0 +1,234 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.heuristics + +import scala.collection.JavaConverters +import scala.collection.mutable.ArrayBuffer + +import com.linkedin.drelephant.analysis.{Heuristic, HeuristicResult, HeuristicResultDetails, Severity, SeverityThresholds} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.math.Statistics +import com.linkedin.drelephant.spark.data.SparkApplicationData +import com.linkedin.drelephant.spark.fetchers.statusapiv1.ExecutorSummary +import com.linkedin.drelephant.util.MemoryFormatUtils + + +/** + * A heuristic based on metrics for a Spark app's executors. + * + * This heuristic concerns the distribution (min, 25p, median, 75p, max) of key executor metrics including input bytes, + * shuffle read bytes, shuffle write bytes, storage memory used, and task time. The max-to-median ratio determines the + * severity of any particular metric. 
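+ *
+ * A worked example with illustrative numbers: for a shuffle read bytes distribution of
+ * {{{
+ *   Distribution(min = 100L << 20, p25 = 512L << 20, median = 1L << 30, p75 = 2L << 30, max = 4L << 30)
+ * }}}
+ * the max-to-median ratio is 4.0, which the default thresholds below rate as SEVERE (at or
+ * above ~3.162 but below the critical value of 10); the 4 GB max also clears the default
+ * 100 MB ignore threshold, so the metric is not skipped.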
+ */ +class ExecutorsHeuristic(private val heuristicConfigurationData: HeuristicConfigurationData) + extends Heuristic[SparkApplicationData] { + import ExecutorsHeuristic._ + import JavaConverters._ + + val maxToMedianRatioSeverityThresholds: SeverityThresholds = + SeverityThresholds.parse(heuristicConfigurationData.getParamMap.get(MAX_TO_MEDIAN_RATIO_SEVERITY_THRESHOLDS_KEY), ascending = true) + .getOrElse(DEFAULT_MAX_TO_MEDIAN_RATIO_SEVERITY_THRESHOLDS) + + val ignoreMaxBytesLessThanThreshold: Long = + Option(heuristicConfigurationData.getParamMap.get(IGNORE_MAX_BYTES_LESS_THAN_THRESHOLD_KEY)) + .map(MemoryFormatUtils.stringToBytes) + .getOrElse(DEFAULT_IGNORE_MAX_BYTES_LESS_THAN_THRESHOLD) + + val ignoreMaxMillisLessThanThreshold: Long = + Option(heuristicConfigurationData.getParamMap.get(IGNORE_MAX_MILLIS_LESS_THAN_THRESHOLD_KEY)) + .map(_.toLong) + .getOrElse(DEFAULT_IGNORE_MAX_MILLIS_LESS_THAN_THRESHOLD) + + override def getHeuristicConfData(): HeuristicConfigurationData = heuristicConfigurationData + + override def apply(data: SparkApplicationData): HeuristicResult = { + val evaluator = new Evaluator(this, data) + + def formatDistribution(distribution: Distribution, longFormatter: Long => String, separator: String = ", "): String = { + val labels = Seq( + s"min: ${longFormatter(distribution.min)}", + s"p25: ${longFormatter(distribution.p25)}", + s"median: ${longFormatter(distribution.median)}", + s"p75: ${longFormatter(distribution.p75)}", + s"max: ${longFormatter(distribution.max)}" + ) + labels.mkString(separator) + } + + def formatDistributionBytes(distribution: Distribution): String = + formatDistribution(distribution, MemoryFormatUtils.bytesToString) + + def formatDistributionDuration(distribution: Distribution): String = + formatDistribution(distribution, Statistics.readableTimespan) + + val resultDetails = Seq( + new HeuristicResultDetails( + "Total executor storage memory allocated", + MemoryFormatUtils.bytesToString(evaluator.totalStorageMemoryAllocated) + ), + new HeuristicResultDetails( + "Total executor storage memory used", + MemoryFormatUtils.bytesToString(evaluator.totalStorageMemoryUsed) + ), + new HeuristicResultDetails( + "Executor storage memory utilization rate", + f"${evaluator.storageMemoryUtilizationRate}%1.3f" + ), + new HeuristicResultDetails( + "Executor storage memory used distribution", + formatDistributionBytes(evaluator.storageMemoryUsedDistribution) + ), + new HeuristicResultDetails( + "Executor task time distribution", + formatDistributionDuration(evaluator.taskTimeDistribution) + ), + new HeuristicResultDetails( + "Executor task time sum", + (evaluator.totalTaskTime / Statistics.SECOND_IN_MS).toString + ), + new HeuristicResultDetails( + "Executor input bytes distribution", + formatDistributionBytes(evaluator.inputBytesDistribution) + ), + new HeuristicResultDetails( + "Executor shuffle read bytes distribution", + formatDistributionBytes(evaluator.shuffleReadBytesDistribution) + ), + new HeuristicResultDetails( + "Executor shuffle write bytes distribution", + formatDistributionBytes(evaluator.shuffleWriteBytesDistribution) + ) + ) + val result = new HeuristicResult( + heuristicConfigurationData.getClassName, + heuristicConfigurationData.getHeuristicName, + evaluator.severity, + 0, + resultDetails.asJava + ) + result + } +} + +object ExecutorsHeuristic { + import JavaConverters._ + import scala.concurrent.duration._ + + val DEFAULT_MAX_TO_MEDIAN_RATIO_SEVERITY_THRESHOLDS: SeverityThresholds = SeverityThresholds( + low = math.pow(10, 0.125), // ~1.334 
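+    // The thresholds are max-to-median ratios, spaced on a log10 scale (exponents 0.125, 0.25, 0.5, 1).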
+ moderate = math.pow(10, 0.25), // ~1.778 + severe = math.pow(10, 0.5), // ~3.162 + critical = 10, + ascending = true + ) + + val DEFAULT_IGNORE_MAX_BYTES_LESS_THAN_THRESHOLD: Long = MemoryFormatUtils.stringToBytes("100 MB") + + val DEFAULT_IGNORE_MAX_MILLIS_LESS_THAN_THRESHOLD: Long = Duration(5, MINUTES).toMillis + + val MAX_TO_MEDIAN_RATIO_SEVERITY_THRESHOLDS_KEY: String = "max_to_median_ratio_severity_thresholds" + + val IGNORE_MAX_BYTES_LESS_THAN_THRESHOLD_KEY: String = "ignore_max_bytes_less_than_threshold" + + val IGNORE_MAX_MILLIS_LESS_THAN_THRESHOLD_KEY: String = "ignore_max_millis_less_than_threshold" + + class Evaluator(executorsHeuristic: ExecutorsHeuristic, data: SparkApplicationData) { + lazy val executorSummaries: Seq[ExecutorSummary] = data.executorSummaries + + lazy val totalStorageMemoryAllocated: Long = executorSummaries.map { _.maxMemory }.sum + + lazy val totalStorageMemoryUsed: Long = executorSummaries.map { _.memoryUsed }.sum + + lazy val storageMemoryUtilizationRate: Double = totalStorageMemoryUsed.toDouble / totalStorageMemoryAllocated.toDouble + + lazy val storageMemoryUsedDistribution: Distribution = + Distribution(executorSummaries.map { _.memoryUsed }) + + lazy val storageMemoryUsedSeverity: Severity = + severityOfDistribution(storageMemoryUsedDistribution, ignoreMaxBytesLessThanThreshold) + + lazy val taskTimeDistribution: Distribution = + Distribution(executorSummaries.map { _.totalDuration }) + + lazy val totalTaskTime : Long = executorSummaries.map(_.totalDuration).sum + + lazy val taskTimeSeverity: Severity = + severityOfDistribution(taskTimeDistribution, ignoreMaxMillisLessThanThreshold) + + lazy val inputBytesDistribution: Distribution = + Distribution(executorSummaries.map { _.totalInputBytes }) + + lazy val inputBytesSeverity: Severity = + severityOfDistribution(inputBytesDistribution, ignoreMaxBytesLessThanThreshold) + + lazy val shuffleReadBytesDistribution: Distribution = + Distribution(executorSummaries.map { _.totalShuffleRead }) + + lazy val shuffleReadBytesSeverity: Severity = + severityOfDistribution(shuffleReadBytesDistribution, ignoreMaxBytesLessThanThreshold) + + lazy val shuffleWriteBytesDistribution: Distribution = + Distribution(executorSummaries.map { _.totalShuffleWrite }) + + lazy val shuffleWriteBytesSeverity: Severity = + severityOfDistribution(shuffleWriteBytesDistribution, ignoreMaxBytesLessThanThreshold) + + lazy val severity: Severity = Severity.max( + storageMemoryUsedSeverity, + taskTimeSeverity, + inputBytesSeverity, + shuffleReadBytesSeverity, + shuffleWriteBytesSeverity + ) + + private[heuristics] def severityOfDistribution( + distribution: Distribution, + ignoreMaxLessThanThreshold: Long, + severityThresholds: SeverityThresholds = maxToMedianRatioSeverityThresholds + ): Severity = { + if (distribution.max < ignoreMaxLessThanThreshold) { + Severity.NONE + } else if (distribution.median == 0L) { + severityThresholds.severityOf(Long.MaxValue) + } else { + severityThresholds.severityOf(BigDecimal(distribution.max) / BigDecimal(distribution.median)) + } + } + + private lazy val maxToMedianRatioSeverityThresholds = executorsHeuristic.maxToMedianRatioSeverityThresholds + + private lazy val ignoreMaxBytesLessThanThreshold = executorsHeuristic.ignoreMaxBytesLessThanThreshold + + private lazy val ignoreMaxMillisLessThanThreshold = executorsHeuristic.ignoreMaxMillisLessThanThreshold + } + + case class Distribution(min: Long, p25: Long, median: Long, p75: Long, max: Long) + + object Distribution { + def apply(values: Seq[Long]): 
Distribution = { + val sortedValues = values.sorted + val sortedValuesAsJava = sortedValues.map(Long.box).to[ArrayBuffer].asJava + Distribution( + sortedValues.min, + p25 = Statistics.percentile(sortedValuesAsJava, 25), + Statistics.median(sortedValuesAsJava), + p75 = Statistics.percentile(sortedValuesAsJava, 75), + sortedValues.max + ) + } + } +} diff --git a/app/com/linkedin/drelephant/spark/heuristics/JobsHeuristic.scala b/app/com/linkedin/drelephant/spark/heuristics/JobsHeuristic.scala new file mode 100644 index 000000000..7014acfe8 --- /dev/null +++ b/app/com/linkedin/drelephant/spark/heuristics/JobsHeuristic.scala @@ -0,0 +1,148 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.heuristics + +import scala.collection.JavaConverters + +import com.linkedin.drelephant.analysis.{Heuristic, HeuristicResult, HeuristicResultDetails, Severity, SeverityThresholds} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.spark.data.SparkApplicationData +import com.linkedin.drelephant.spark.fetchers.statusapiv1.JobData +import org.apache.spark.JobExecutionStatus + + +/** + * A heuristic based on metrics for a Spark app's jobs. + * + * This heuristic reports job failures and high task failure rates for each job. 
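+ *
+ * A worked example with illustrative numbers: an app with 8 SUCCEEDED jobs and 2 FAILED jobs
+ * has a job failure rate of 2 / (8 + 2) = 0.2, which the default thresholds below rate as
+ * LOW severity (at or above 0.1 but below the moderate value of 0.3).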
+ */ +class JobsHeuristic(private val heuristicConfigurationData: HeuristicConfigurationData) + extends Heuristic[SparkApplicationData] { + import JobsHeuristic._ + import JavaConverters._ + + val jobFailureRateSeverityThresholds: SeverityThresholds = + SeverityThresholds.parse(heuristicConfigurationData.getParamMap.get(JOB_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY), ascending = true) + .getOrElse(DEFAULT_JOB_FAILURE_RATE_SEVERITY_THRESHOLDS) + + val taskFailureRateSeverityThresholds: SeverityThresholds = + SeverityThresholds.parse(heuristicConfigurationData.getParamMap.get(TASK_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY), ascending = true) + .getOrElse(DEFAULT_TASK_FAILURE_RATE_SEVERITY_THRESHOLDS) + + override def getHeuristicConfData(): HeuristicConfigurationData = heuristicConfigurationData + + override def apply(data: SparkApplicationData): HeuristicResult = { + val evaluator = new Evaluator(this, data) + + def formatFailedJobs(failedJobs: Seq[JobData]): String = failedJobs.map(formatFailedJob).mkString("\n") + + def formatFailedJob(jobData: JobData): String = f"job ${jobData.jobId}, ${jobData.name}" + + def formatJobsWithHighTaskFailureRates(jobsWithHighTaskFailureRates: Seq[(JobData, Double)]): String = + jobsWithHighTaskFailureRates + .map { case (jobData, taskFailureRate) => formatJobWithHighTaskFailureRate(jobData, taskFailureRate) } + .mkString("\n") + + def formatJobWithHighTaskFailureRate(jobData: JobData, taskFailureRate: Double): String = + f"job ${jobData.jobId}, ${jobData.name} (task failure rate: ${taskFailureRate}%1.3f)" + + val resultDetails = Seq( + new HeuristicResultDetails("Spark completed jobs count", evaluator.numCompletedJobs.toString), + new HeuristicResultDetails("Spark failed jobs count", evaluator.numFailedJobs.toString), + new HeuristicResultDetails("Spark failed jobs list", formatFailedJobs(evaluator.failedJobs)), + new HeuristicResultDetails("Spark job failure rate", f"${evaluator.jobFailureRate.getOrElse(0.0D)}%.3f"), + new HeuristicResultDetails( + "Spark jobs with high task failure rates", + formatJobsWithHighTaskFailureRates(evaluator.jobsWithHighTaskFailureRates) + ) + ) + val result = new HeuristicResult( + heuristicConfigurationData.getClassName, + heuristicConfigurationData.getHeuristicName, + evaluator.severity, + 0, + resultDetails.asJava + ) + result + } +} + +object JobsHeuristic { + /** The default severity thresholds for the rate of an application's jobs failing. */ + val DEFAULT_JOB_FAILURE_RATE_SEVERITY_THRESHOLDS = + SeverityThresholds(low = 0.1D, moderate = 0.3D, severe = 0.5D, critical = 0.5D, ascending = true) + + /** The default severity thresholds for the rate of a job's tasks failing. 
*/ + val DEFAULT_TASK_FAILURE_RATE_SEVERITY_THRESHOLDS = + SeverityThresholds(low = 0.1D, moderate = 0.3D, severe = 0.5D, critical = 0.5D, ascending = true) + + val JOB_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY = "job_failure_rate_severity_thresholds" + + val TASK_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY = "job_task_failure_rate_severity_thresholds" + + class Evaluator(jobsHeuristic: JobsHeuristic, data: SparkApplicationData) { + lazy val jobDatas: Seq[JobData] = data.jobDatas + + lazy val numCompletedJobs: Int = jobDatas.count { _.status == JobExecutionStatus.SUCCEEDED } + + lazy val numFailedJobs: Int = jobDatas.count { _.status == JobExecutionStatus.FAILED } + + lazy val failedJobs: Seq[JobData] = jobDatas.filter { _.status == JobExecutionStatus.FAILED } + + lazy val jobFailureRate: Option[Double] = { + // Currently, the calculation assumes there are no jobs with UNKNOWN or RUNNING state. + val numJobs = numCompletedJobs + numFailedJobs + if (numJobs == 0) None else Some(numFailedJobs.toDouble / numJobs.toDouble) + } + + lazy val jobsWithHighTaskFailureRates: Seq[(JobData, Double)] = + jobsWithHighTaskFailureRateSeverities.map { case (jobData, taskFailureRate, _) => (jobData, taskFailureRate) } + + lazy val severity: Severity = Severity.max((jobFailureRateSeverity +: taskFailureRateSeverities): _*) + + private lazy val jobFailureRateSeverityThresholds = jobsHeuristic.jobFailureRateSeverityThresholds + + private lazy val taskFailureRateSeverityThresholds = jobsHeuristic.taskFailureRateSeverityThresholds + + private lazy val jobFailureRateSeverity: Severity = + jobFailureRateSeverityThresholds.severityOf(jobFailureRate.getOrElse[Double](0.0D)) + + private lazy val jobsWithHighTaskFailureRateSeverities: Seq[(JobData, Double, Severity)] = + jobsAndTaskFailureRateSeverities.filter { case (_, _, severity) => severity.getValue > Severity.MODERATE.getValue } + + private lazy val jobsAndTaskFailureRateSeverities: Seq[(JobData, Double, Severity)] = for { + jobData <- jobDatas + (taskFailureRate, severity) = taskFailureRateAndSeverityOf(jobData) + } yield (jobData, taskFailureRate, severity) + + private lazy val taskFailureRateSeverities: Seq[Severity] = + jobsAndTaskFailureRateSeverities.map { case (_, _, severity) => severity } + + private def taskFailureRateAndSeverityOf(jobData: JobData): (Double, Severity) = { + val taskFailureRate = taskFailureRateOf(jobData).getOrElse(0.0D) + (taskFailureRate, taskFailureRateSeverityThresholds.severityOf(taskFailureRate)) + } + + private def taskFailureRateOf(jobData: JobData): Option[Double] = { + // Currently, the calculation doesn't include skipped or active tasks. + val numCompletedTasks = jobData.numCompletedTasks + val numFailedTasks = jobData.numFailedTasks + val numTasks = numCompletedTasks + numFailedTasks + if (numTasks == 0) None else Some(numFailedTasks.toDouble / numTasks.toDouble) + } + } +} diff --git a/app/com/linkedin/drelephant/spark/heuristics/StagesHeuristic.scala b/app/com/linkedin/drelephant/spark/heuristics/StagesHeuristic.scala new file mode 100644 index 000000000..dd92f814f --- /dev/null +++ b/app/com/linkedin/drelephant/spark/heuristics/StagesHeuristic.scala @@ -0,0 +1,210 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.heuristics + +import com.linkedin.drelephant.spark.fetchers.statusapiv1.ExecutorSummary +import scala.collection.JavaConverters +import scala.concurrent.duration +import scala.concurrent.duration.Duration + +import com.linkedin.drelephant.analysis.{Heuristic, HeuristicResult, HeuristicResultDetails, Severity, SeverityThresholds} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.math.Statistics +import com.linkedin.drelephant.spark.data.SparkApplicationData +import com.linkedin.drelephant.spark.fetchers.statusapiv1.StageData +import org.apache.spark.status.api.v1.StageStatus + + +/** + * A heuristic based on metrics for a Spark app's stages. + * + * This heuristic reports stage failures, high task failure rates for each stage, and long average executor runtimes for + * each stage. + */ +class StagesHeuristic(private val heuristicConfigurationData: HeuristicConfigurationData) + extends Heuristic[SparkApplicationData] { + import StagesHeuristic._ + import JavaConverters._ + + val stageFailureRateSeverityThresholds: SeverityThresholds = + SeverityThresholds.parse(heuristicConfigurationData.getParamMap.get(STAGE_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY), ascending = true) + .getOrElse(DEFAULT_STAGE_FAILURE_RATE_SEVERITY_THRESHOLDS) + + val taskFailureRateSeverityThresholds: SeverityThresholds = + SeverityThresholds.parse(heuristicConfigurationData.getParamMap.get(TASK_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY), ascending = true) + .getOrElse(DEFAULT_TASK_FAILURE_RATE_SEVERITY_THRESHOLDS) + + val stageRuntimeMillisSeverityThresholds: SeverityThresholds = + SeverityThresholds.parse(heuristicConfigurationData.getParamMap.get(STAGE_RUNTIME_MINUTES_SEVERITY_THRESHOLDS_KEY), ascending = true) + .map(minutesSeverityThresholdsToMillisSeverityThresholds) + .getOrElse(DEFAULT_STAGE_RUNTIME_MILLIS_SEVERITY_THRESHOLDS) + + override def getHeuristicConfData(): HeuristicConfigurationData = heuristicConfigurationData + + override def apply(data: SparkApplicationData): HeuristicResult = { + val evaluator = new Evaluator(this, data) + + def formatStagesWithHighTaskFailureRates(stagesWithHighTaskFailureRates: Seq[(StageData, Double)]): String = + stagesWithHighTaskFailureRates + .map { case (stageData, taskFailureRate) => formatStageWithHighTaskFailureRate(stageData, taskFailureRate) } + .mkString("\n") + + def formatStageWithHighTaskFailureRate(stageData: StageData, taskFailureRate: Double): String = + f"stage ${stageData.stageId}, attempt ${stageData.attemptId} (task failure rate: ${taskFailureRate}%1.3f)" + + def formatStagesWithLongAverageExecutorRuntimes(stagesWithLongAverageExecutorRuntimes: Seq[(StageData, Long)]): String = + stagesWithLongAverageExecutorRuntimes + .map { case (stageData, runtime) => formatStageWithLongRuntime(stageData, runtime) } + .mkString("\n") + + def formatStageWithLongRuntime(stageData: StageData, runtime: Long): String = + f"stage ${stageData.stageId}, attempt ${stageData.attemptId} (runtime: ${Statistics.readableTimespan(runtime)})" + + val resultDetails = Seq( 
+ new HeuristicResultDetails("Spark completed stages count", evaluator.numCompletedStages.toString), + new HeuristicResultDetails("Spark failed stages count", evaluator.numFailedStages.toString), + new HeuristicResultDetails("Spark stage failure rate", f"${evaluator.stageFailureRate.getOrElse(0.0D)}%.3f"), + new HeuristicResultDetails( + "Spark stages with high task failure rates", + formatStagesWithHighTaskFailureRates(evaluator.stagesWithHighTaskFailureRates) + ), + new HeuristicResultDetails( + "Spark stages with long average executor runtimes", + formatStagesWithLongAverageExecutorRuntimes(evaluator.stagesWithLongAverageExecutorRuntimes) + ) + ) + val result = new HeuristicResult( + heuristicConfigurationData.getClassName, + heuristicConfigurationData.getHeuristicName, + evaluator.severity, + 0, + resultDetails.asJava + ) + result + } + +} + +object StagesHeuristic { + /** The default severity thresholds for the rate of an application's stages failing. */ + val DEFAULT_STAGE_FAILURE_RATE_SEVERITY_THRESHOLDS = + SeverityThresholds(low = 0.1D, moderate = 0.3D, severe = 0.5D, critical = 0.5D, ascending = true) + + /** The default severity thresholds for the rate of a stage's tasks failing. */ + val DEFAULT_TASK_FAILURE_RATE_SEVERITY_THRESHOLDS = + SeverityThresholds(low = 0.1D, moderate = 0.3D, severe = 0.5D, critical = 0.5D, ascending = true) + + /** The default severity thresholds for a stage's runtime. */ + val DEFAULT_STAGE_RUNTIME_MILLIS_SEVERITY_THRESHOLDS = SeverityThresholds( + low = Duration("15min").toMillis, + moderate = Duration("30min").toMillis, + severe = Duration("45min").toMillis, + critical = Duration("60min").toMillis, + ascending = true + ) + + val STAGE_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY = "stage_failure_rate_severity_thresholds" + val TASK_FAILURE_RATE_SEVERITY_THRESHOLDS_KEY = "stage_task_failure_rate_severity_thresholds" + val STAGE_RUNTIME_MINUTES_SEVERITY_THRESHOLDS_KEY = "stage_runtime_minutes_severity_thresholds" + + val SPARK_EXECUTOR_INSTANCES_KEY = "spark.executor.instances" + + class Evaluator(stagesHeuristic: StagesHeuristic, data: SparkApplicationData) { + lazy val stageDatas: Seq[StageData] = data.stageDatas + + lazy val appConfigurationProperties: Map[String, String] = + data.appConfigurationProperties + + lazy val executorSummaries: Seq[ExecutorSummary] = data.executorSummaries + + lazy val numCompletedStages: Int = stageDatas.count { _.status == StageStatus.COMPLETE } + + lazy val numFailedStages: Int = stageDatas.count { _.status == StageStatus.FAILED } + + lazy val stageFailureRate: Option[Double] = { + val numStages = numCompletedStages + numFailedStages + if (numStages == 0) None else Some(numFailedStages.toDouble / numStages.toDouble) + } + + lazy val stagesWithHighTaskFailureRates: Seq[(StageData, Double)] = + stagesWithHighTaskFailureRateSeverities.map { case (stageData, taskFailureRate, _) => (stageData, taskFailureRate) } + + lazy val stagesWithLongAverageExecutorRuntimes: Seq[(StageData, Long)] = + stagesAndAverageExecutorRuntimeSeverities + .collect { case (stageData, runtime, severity) if severity.getValue > Severity.MODERATE.getValue => (stageData, runtime) } + + lazy val severity: Severity = Severity.max((stageFailureRateSeverity +: (taskFailureRateSeverities ++ runtimeSeverities)): _*) + + private lazy val stageFailureRateSeverityThresholds = stagesHeuristic.stageFailureRateSeverityThresholds + + private lazy val taskFailureRateSeverityThresholds = stagesHeuristic.taskFailureRateSeverityThresholds + + private lazy val 
stageRuntimeMillisSeverityThresholds = stagesHeuristic.stageRuntimeMillisSeverityThresholds + + private lazy val stageFailureRateSeverity: Severity = + stageFailureRateSeverityThresholds.severityOf(stageFailureRate.getOrElse[Double](0.0D)) + + private lazy val stagesWithHighTaskFailureRateSeverities: Seq[(StageData, Double, Severity)] = + stagesAndTaskFailureRateSeverities.filter { case (_, _, severity) => severity.getValue > Severity.MODERATE.getValue } + + private lazy val stagesAndTaskFailureRateSeverities: Seq[(StageData, Double, Severity)] = for { + stageData <- stageDatas + (taskFailureRate, severity) = taskFailureRateAndSeverityOf(stageData) + } yield (stageData, taskFailureRate, severity) + + private lazy val taskFailureRateSeverities: Seq[Severity] = + stagesAndTaskFailureRateSeverities.map { case (_, _, severity) => severity } + + private lazy val stagesAndAverageExecutorRuntimeSeverities: Seq[(StageData, Long, Severity)] = for { + stageData <- stageDatas + (runtime, severity) = averageExecutorRuntimeAndSeverityOf(stageData) + } yield (stageData, runtime, severity) + + private lazy val runtimeSeverities: Seq[Severity] = stagesAndAverageExecutorRuntimeSeverities.map { case (_, _, severity) => severity } + + private lazy val executorInstances: Int = + appConfigurationProperties.get(SPARK_EXECUTOR_INSTANCES_KEY).map(_.toInt).getOrElse(executorSummaries.size) + + private def taskFailureRateAndSeverityOf(stageData: StageData): (Double, Severity) = { + val taskFailureRate = taskFailureRateOf(stageData).getOrElse(0.0D) + (taskFailureRate, taskFailureRateSeverityThresholds.severityOf(taskFailureRate)) + } + + private def taskFailureRateOf(stageData: StageData): Option[Double] = { + // Currently, the calculation doesn't include skipped or active tasks. + val numCompleteTasks = stageData.numCompleteTasks + val numFailedTasks = stageData.numFailedTasks + val numTasks = numCompleteTasks + numFailedTasks + if (numTasks == 0) None else Some(numFailedTasks.toDouble / numTasks.toDouble) + } + + private def averageExecutorRuntimeAndSeverityOf(stageData: StageData): (Long, Severity) = { + val averageExecutorRuntime = stageData.executorRunTime / executorInstances + (averageExecutorRuntime, stageRuntimeMillisSeverityThresholds.severityOf(averageExecutorRuntime)) + } + } + + def minutesSeverityThresholdsToMillisSeverityThresholds( + minutesSeverityThresholds: SeverityThresholds + ): SeverityThresholds = SeverityThresholds( + Duration(minutesSeverityThresholds.low.longValue, duration.MINUTES).toMillis, + Duration(minutesSeverityThresholds.moderate.longValue, duration.MINUTES).toMillis, + Duration(minutesSeverityThresholds.severe.longValue, duration.MINUTES).toMillis, + Duration(minutesSeverityThresholds.critical.longValue, duration.MINUTES).toMillis, + minutesSeverityThresholds.ascending + ) +} diff --git a/app/com/linkedin/drelephant/util/InfoExtractor.java b/app/com/linkedin/drelephant/util/InfoExtractor.java new file mode 100644 index 000000000..a81de19d9 --- /dev/null +++ b/app/com/linkedin/drelephant/util/InfoExtractor.java @@ -0,0 +1,299 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.util;
+
+import com.linkedin.drelephant.analysis.HadoopApplicationData;
+import com.linkedin.drelephant.configurations.scheduler.SchedulerConfiguration;
+import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData;
+import com.linkedin.drelephant.exceptions.WorkflowClient;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.schedulers.Scheduler;
+import com.linkedin.drelephant.spark.data.SparkApplicationData;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.w3c.dom.Document;
+
+import models.AppResult;
+import scala.Option;
+import scala.Some;
+
+
+/**
+ * InfoExtractor is responsible for retrieving information and context about a
+ * job from the job's configuration.
+ */
+public class InfoExtractor {
+  private static final Logger logger = Logger.getLogger(InfoExtractor.class);
+  private static final String SPARK_EXTRA_JAVA_OPTIONS = "spark.driver.extraJavaOptions";
+
+  private static final String SCHEDULER_CONF = "SchedulerConf.xml";
+
+  private static final List<SchedulerConfigurationData> _configuredSchedulers;
+
+  /**
+   * Load all the schedulers configured in SchedulerConf.xml
+   */
+  static {
+    Document document = Utils.loadXMLDoc(SCHEDULER_CONF);
+    _configuredSchedulers = new SchedulerConfiguration(document.getDocumentElement()).getSchedulerConfigurationData();
+    for (SchedulerConfigurationData data : _configuredSchedulers) {
+      logger.info(String.format("Load Scheduler %s with class: %s", data.getSchedulerName(), data.getClassName()));
+    }
+  }
+
+  /**
+   * Find the scheduler which scheduled the job.
+   *
+   * @param appId The application id
+   * @param properties The application properties
+   * @return the corresponding Scheduler which scheduled the job.
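+   *
+   * Note: as implemented below, each configured scheduler class is instantiated reflectively
+   * and is expected to provide a (String appId, Properties properties, SchedulerConfigurationData data)
+   * constructor; the first instance that reports itself non-empty via isEmpty() is returned.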
+   */
+  public static Scheduler getSchedulerInstance(String appId, Properties properties) {
+    if (properties != null) {
+      for (SchedulerConfigurationData data : _configuredSchedulers) {
+        try {
+          Class<?> schedulerClass = Class.forName(data.getClassName());
+          Object instance =
+              schedulerClass.getConstructor(String.class, Properties.class, SchedulerConfigurationData.class)
+                  .newInstance(appId, properties, data);
+          if (!(instance instanceof Scheduler)) {
+            throw new IllegalArgumentException("Class " + schedulerClass.getName() + " is not an implementation of " + Scheduler.class.getName());
+          }
+          Scheduler scheduler = (Scheduler) instance;
+          if (!scheduler.isEmpty()) {
+            return scheduler;
+          }
+        } catch (ClassNotFoundException e) {
+          throw new RuntimeException("Could not find class " + data.getClassName(), e);
+        } catch (InstantiationException e) {
+          throw new RuntimeException("Could not instantiate class " + data.getClassName(), e);
+        } catch (IllegalAccessException e) {
+          throw new RuntimeException("Could not access constructor for class " + data.getClassName(), e);
+        } catch (RuntimeException e) {
+          throw new RuntimeException(data.getClassName() + " is not a valid Scheduler class.", e);
+        } catch (InvocationTargetException e) {
+          throw new RuntimeException("Could not invoke class " + data.getClassName(), e);
+        } catch (NoSuchMethodException e) {
+          throw new RuntimeException("Could not find constructor for class " + data.getClassName(), e);
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Loads the result with info depending on the application type.
+   *
+   * @param result The AppResult to be loaded
+   * @param data The Hadoop application data
+   */
+  public static void loadInfo(AppResult result, HadoopApplicationData data) {
+    Properties properties = new Properties();
+    if (data instanceof MapReduceApplicationData) {
+      properties = retrieveMapreduceProperties((MapReduceApplicationData) data);
+    } else if (data instanceof SparkApplicationData) {
+      properties = retrieveSparkProperties((SparkApplicationData) data);
+    }
+    Scheduler scheduler = getSchedulerInstance(data.getAppId(), properties);
+
+    if (scheduler == null) {
+      logger.info("No Scheduler found for appid: " + data.getAppId());
+      loadNoSchedulerInfo(result);
+    } else if (StringUtils.isEmpty(scheduler.getJobDefId()) || StringUtils.isEmpty(scheduler.getJobExecId())
+        || StringUtils.isEmpty(scheduler.getFlowDefId()) || StringUtils.isEmpty(scheduler.getFlowExecId())) {
+      logger.warn("This job doesn't have the correct " + scheduler.getSchedulerName() + " integration support. "
+          + "I will treat this as an adhoc job.");
+      logger.info("No Flow/job info found for appid: " + data.getAppId());
+      loadNoSchedulerInfo(result);
+    } else {
+      loadSchedulerInfo(result, data, scheduler);
+    }
+  }
+
+  /**
+   * Retrieve the Spark properties from SPARK_EXTRA_JAVA_OPTIONS.
+   *
+   * @param appData the Spark application data
+   * @return The retrieved Spark properties
+   */
+  public static Properties retrieveSparkProperties(SparkApplicationData appData) {
+    Option<String> prop = appData.appConfigurationProperties().get(SPARK_EXTRA_JAVA_OPTIONS);
+    Properties properties = new Properties();
+    if (prop.isDefined()) {
+      try {
+        Map<String, String> javaOptions = Utils.parseJavaOptions(prop.get());
+        for (String key : javaOptions.keySet()) {
+          properties.setProperty(key, unescapeString(javaOptions.get(key)));
+        }
+      } catch (IllegalArgumentException e) {
+        logger.error("Encountered error while parsing java options: " + e.getMessage());
+      }
+    } else {
+      logger.error("Unable to retrieve the scheduler info for application [" +
+          appData.appId() + "]. It does not contain the [" + SPARK_EXTRA_JAVA_OPTIONS + "] property in its Spark properties.");
+    }
+    return properties;
+  }
+
+  /**
+   * Retrieve the MapReduce application properties.
+   *
+   * @param appData the MapReduce application data
+   * @return the retrieved MapReduce properties
+   */
+  public static Properties retrieveMapreduceProperties(MapReduceApplicationData appData) {
+    return appData.getConf();
+  }
+
+  /**
+   * Populates the given app result with the info from the given application data and scheduler.
+   *
+   * @param result the AppResult to populate
+   * @param data the HadoopApplicationData to use when populating the result
+   * @param scheduler the Scheduler to use when populating the result
+   */
+  public static void loadSchedulerInfo(AppResult result, HadoopApplicationData data, Scheduler scheduler) {
+    String appId = data.getAppId();
+
+    result.scheduler = Utils.truncateField(scheduler.getSchedulerName(), AppResult.SCHEDULER_LIMIT, appId);
+    result.workflowDepth = scheduler.getWorkflowDepth();
+
+    result.jobName = scheduler.getJobName() != null ? Utils
+        .truncateField(scheduler.getJobName(), AppResult.JOB_NAME_LIMIT, appId) : "";
+
+    result.jobDefId = Utils.truncateField(scheduler.getJobDefId(), AppResult.URL_LEN_LIMIT, appId);
+    result.jobDefUrl = scheduler.getJobDefUrl() != null ? Utils
+        .truncateField(scheduler.getJobDefUrl(), AppResult.URL_LEN_LIMIT, appId) : "";
+
+    result.jobExecId = Utils.truncateField(scheduler.getJobExecId(), AppResult.URL_LEN_LIMIT, appId);
+    result.jobExecUrl = scheduler.getJobExecUrl() != null ? Utils
+        .truncateField(scheduler.getJobExecUrl(), AppResult.URL_LEN_LIMIT, appId) : "";
+
+    result.flowDefId = Utils.truncateField(scheduler.getFlowDefId(), AppResult.URL_LEN_LIMIT, appId);
+    result.flowDefUrl = scheduler.getFlowDefUrl() != null ? Utils
+        .truncateField(scheduler.getFlowDefUrl(), AppResult.URL_LEN_LIMIT, appId) : "";
+
+    result.flowExecId = Utils.truncateField(scheduler.getFlowExecId(), AppResult.FLOW_EXEC_ID_LIMIT, appId);
+    result.flowExecUrl = scheduler.getFlowExecUrl() != null ? Utils
+        .truncateField(scheduler.getFlowExecUrl(), AppResult.URL_LEN_LIMIT, appId) : "";
+  }
+
+  /**
+   * A temporary workaround: Spark 1.2 needs to escape '&' as '\&' in its javaOptions.
+   * This method is the reverse process, recovering the original string.
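+   * For example (an illustrative value), an escaped option value "http://foo\&bar" is
+   * restored to "http://foo&bar".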
+   *
+   * @param s The string to unescape
+   * @return The original string
+   */
+  private static String unescapeString(String s) {
+    if (s == null) {
+      return null;
+    }
+    return s.replaceAll("\\\\\\&", "\\&");
+  }
+
+  /**
+   * Update the application result with adhoc (not scheduled by a scheduler) information.
+   *
+   * @param result The AppResult to be updated
+   */
+  private static void loadNoSchedulerInfo(AppResult result) {
+    result.scheduler = null;
+    result.workflowDepth = 0;
+    result.jobExecId = "";
+    result.jobDefId = "";
+    result.flowExecId = "";
+    result.flowDefId = "";
+    result.jobExecUrl = "";
+    result.jobDefUrl = "";
+    result.flowExecUrl = "";
+    result.flowDefUrl = "";
+    result.jobName = "";
+  }
+
+  /**
+   * Returns the set of all the schedulers that have been configured for exception analysis
+   * @return The set of all the schedulers that have been configured for exception analysis
+   */
+  public static Set<String> getSchedulersConfiguredForException() {
+    Set<String> schedulersForExceptions = new HashSet<String>();
+    for (SchedulerConfigurationData data : _configuredSchedulers) {
+      if (data.getParamMap().containsKey("exception_enabled") && data.getParamMap().get("exception_enabled")
+          .equals("true")) {
+        schedulersForExceptions.add(data.getSchedulerName());
+      }
+    }
+    return schedulersForExceptions;
+  }
+
+  /**
+   * Returns the workflow client instance based on the scheduler name and the workflow URL
+   * @param scheduler The name of the scheduler
+   * @param url The URL of the workflow
+   * @return The WorkflowClient based on the workflow URL
+   */
+  public static WorkflowClient getWorkflowClientInstance(String scheduler, String url) {
+    if (!getSchedulersConfiguredForException().contains(scheduler)) {
+      throw new RuntimeException(String.format("Scheduler %s is not configured for exception fingerprinting", scheduler));
+    }
+
+    for (SchedulerConfigurationData data : _configuredSchedulers) {
+      if (data.getSchedulerName().equals(scheduler)) {
+        try {
+          String workflowClass = data.getParamMap().get("exception_class");
+          Class<?> schedulerClass = Class.forName(workflowClass);
+          Object instance = schedulerClass.getConstructor(String.class).newInstance(url);
+          if (!(instance instanceof WorkflowClient)) {
+            throw new IllegalArgumentException(
+                "Class " + schedulerClass.getName() + " is not an implementation of " + WorkflowClient.class.getName());
+          }
+          WorkflowClient workflowClient = (WorkflowClient) instance;
+          return workflowClient;
+        } catch (ClassNotFoundException e) {
+          throw new RuntimeException("Could not find class " + data.getClassName(), e);
+        } catch (InstantiationException e) {
+          throw new RuntimeException("Could not instantiate class " + data.getClassName(), e);
+        } catch (IllegalAccessException e) {
+          throw new RuntimeException("Could not access constructor for class " + data.getClassName(), e);
+        } catch (RuntimeException e) {
+          throw new RuntimeException(data.getClassName() + " is not a valid WorkflowClient class.", e);
+        } catch (InvocationTargetException e) {
+          throw new RuntimeException("Could not invoke class " + data.getClassName(), e);
+        } catch (NoSuchMethodException e) {
+          throw new RuntimeException("Could not find constructor for class " + data.getClassName(), e);
+        }
+      }
+    }
+    return null;
+  }
+
+  public static SchedulerConfigurationData getSchedulerData(String scheduler) {
+    for (SchedulerConfigurationData data : _configuredSchedulers) {
+      if (data.getSchedulerName().equals(scheduler)) {
+        return data;
+      }
+    }
+    return null;
+  }
+}
diff --git a/app/com/linkedin/drelephant/util/MemoryFormatUtils.java
b/app/com/linkedin/drelephant/util/MemoryFormatUtils.java
new file mode 100644
index 000000000..b32f61fb9
--- /dev/null
+++ b/app/com/linkedin/drelephant/util/MemoryFormatUtils.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.util;
+
+import java.text.DecimalFormat;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.apache.commons.lang.StringUtils;
+
+
+/**
+ * This is a utils class that handles memory string parsing and formatting problems.
+ */
+public class MemoryFormatUtils {
+  private MemoryFormatUtils() {
+    // Do nothing, empty on purpose
+  }
+
+  private static class MemoryUnit {
+    private final String _name;
+    private final long _bytes;
+
+    public MemoryUnit(String name, long bytes) {
+      _name = name;
+      _bytes = bytes;
+    }
+
+    public String getName() {
+      return _name;
+    }
+
+    public long getBytes() {
+      return _bytes;
+    }
+
+    @Override
+    public String toString() {
+      return _name;
+    }
+  }
+
+  // Units must be in descending order
+  private static final MemoryUnit[] UNITS =
+      new MemoryUnit[]{new MemoryUnit("TB", 1L << 40), new MemoryUnit("GB", 1L << 30), new MemoryUnit("MB",
+          1L << 20), new MemoryUnit("KB", 1L << 10), new MemoryUnit("B", 1L)};
+
+  private static final DecimalFormat FORMATTER = new DecimalFormat("#,##0.##");
+  private static final Pattern REGEX_MATCHER =
+      Pattern.compile("([-+]?\\d*\\.?\\d+(?:[eE][-+]?\\d+)?)\\s*((?:[TGMK])?B?)?", Pattern.CASE_INSENSITIVE);
+
+  /**
+   * Given a memory value in bytes, convert it to a string using the largest unit that yields a non-zero integer part.
+   *
+   * @param value The memory value in long bytes
+   * @return The formatted string; an IllegalArgumentException is thrown for negative values
+   */
+  public static String bytesToString(long value) {
+    if (value < 0) {
+      throw new IllegalArgumentException("Invalid memory size: " + value);
+    }
+    for (int i = 0; i < UNITS.length; i++) {
+      long bytes = UNITS[i].getBytes();
+      if (value >= bytes) {
+        double numResult = bytes > 1 ? (double) value / (double) bytes : (double) value;
+        return FORMATTER.format(numResult) + " " + UNITS[i].getName();
+      }
+    }
+    return value + " " + UNITS[UNITS.length - 1].getName();
+  }
+
+  /**
+   * Convert a formatted string into a long value in bytes.
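+   * For example (illustrative inputs), "128MB" parses to 134217728 and "2 GB" to 2147483648.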
+   * This method handles strings with an optional, case-insensitive unit (B, KB, MB, GB, TB),
+   * where the trailing "B" may be omitted.
+   *
+   * @param formattedString The string to convert
+   * @return The bytes value
+   */
+  public static long stringToBytes(String formattedString) {
+    if (formattedString == null) {
+      return 0L;
+    }
+
+    Matcher matcher = REGEX_MATCHER.matcher(formattedString);
+    if (!matcher.matches()) {
+      throw new IllegalArgumentException(
+          "The formatted string [" + formattedString + "] does not match with the regex /" + REGEX_MATCHER.toString()
+              + "/");
+    }
+    if (matcher.groupCount() != 1 && matcher.groupCount() != 2) {
+      throw new IllegalArgumentException();
+    }
+
+    double numPart = Double.parseDouble(matcher.group(1));
+    if (numPart < 0) {
+      throw new IllegalArgumentException("The number part of the memory cannot be less than zero: [" + numPart + "].");
+    }
+    String unitPart = matcher.groupCount() == 2 ? matcher.group(2).toUpperCase() : "";
+    if (!unitPart.endsWith("B")) {
+      unitPart += "B";
+    }
+    for (int i = 0; i < UNITS.length; i++) {
+      if (unitPart.equals(UNITS[i].getName())) {
+        return (long) (numPart * UNITS[i].getBytes());
+      }
+    }
+    throw new IllegalArgumentException("The formatted string [" + formattedString + "]'s unit part [" + unitPart
+        + "] does not match any unit. The supported units are (case-insensitive, and the trailing 'B' is optional): ["
+        + StringUtils.join(UNITS) + "].");
+  }
+}
diff --git a/app/com/linkedin/drelephant/util/SparkUtils.scala b/app/com/linkedin/drelephant/util/SparkUtils.scala
new file mode 100644
index 000000000..3a0354070
--- /dev/null
+++ b/app/com/linkedin/drelephant/util/SparkUtils.scala
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.util
+
+import java.io.{File, FileInputStream, InputStreamReader}
+import java.util.Properties
+
+import scala.collection.JavaConverters
+
+
+trait SparkUtils {
+  import JavaConverters._
+
+  def defaultEnv: Map[String, String]
+
+  // Below this line are modified utility methods from
+  // https://github.com/apache/spark/blob/v1.4.1/core/src/main/scala/org/apache/spark/util/Utils.scala
+
+  /** Return the path of the default Spark properties file. */
+  def getDefaultPropertiesFile(env: Map[String, String] = defaultEnv): Option[String] = {
+    env.get("SPARK_CONF_DIR")
+      .orElse(env.get("SPARK_HOME").map { t => s"$t${File.separator}conf" })
+      .map { t => new File(s"$t${File.separator}spark-defaults.conf") }
+      .filter(_.isFile)
+      .map(_.getAbsolutePath)
+  }
+
+  /** Load properties present in the given file.
*/ + def getPropertiesFromFile(filename: String): Map[String, String] = { + val file = new File(filename) + require(file.exists(), s"Properties file $file does not exist") + require(file.isFile(), s"Properties file $file is not a normal file") + + val inReader = new InputStreamReader(new FileInputStream(file), "UTF-8") + try { + val properties = new Properties() + properties.load(inReader) + properties.stringPropertyNames().asScala.map( + k => (k, properties.getProperty(k).trim)).toMap + } finally { + inReader.close() + } + } +} + +object SparkUtils extends SparkUtils { + override val defaultEnv = sys.env +} diff --git a/app/com/linkedin/drelephant/util/Utils.java b/app/com/linkedin/drelephant/util/Utils.java index 1a37516eb..e13aca0d9 100644 --- a/app/com/linkedin/drelephant/util/Utils.java +++ b/app/com/linkedin/drelephant/util/Utils.java @@ -1,72 +1,502 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package com.linkedin.drelephant.util; -import java.util.ArrayList; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.math.Statistics; +import java.io.IOException; +import java.io.InputStream; +import java.text.DecimalFormat; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import javax.script.ScriptEngine; +import javax.script.ScriptEngineManager; +import javax.script.ScriptException; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import org.apache.hadoop.conf.Configuration; +import models.AppResult; +import org.apache.commons.io.FileUtils; +import org.apache.log4j.Logger; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; import java.util.List; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.*; -public class Utils { - public static String combineCsvLines(String[] lines) { - StringBuilder sb = new StringBuilder(); - for (String line : lines) { - sb.append(line).append("\n"); - } - return sb.toString().trim(); - } - - public static String createCsvLine(String... parts) { - StringBuilder sb = new StringBuilder(); - String quotes = "\""; - String comma = ","; - for (int i = 0; i < parts.length; i++) { - sb.append(quotes).append(parts[i].replaceAll(quotes, quotes + quotes)).append(quotes); - if (i != parts.length - 1) { - sb.append(comma); - } - } - return sb.toString(); +/** + * This class contains all the utility methods. 
+ */
+public final class Utils {
+  private static final Logger logger = Logger.getLogger(Utils.class);
+
+  private static final String TRUNCATE_SUFFIX = "...";
+
+  private Utils() {
+    // do nothing
+  }
+
+  /**
+   * Given a mapreduce job's application id, get its corresponding job id
+   *
+   * @param appId The application id of the job
+   * @return The corresponding job id
+   */
+  public static String getJobIdFromApplicationId(String appId) {
+    return appId.replaceAll("application", "job");
+  }
+
+  /**
+   * Load an XML document from a file path
+   *
+   * @param filePath The file path to load
+   * @return The loaded Document object
+   */
+  public static Document loadXMLDoc(String filePath) {
+    InputStream instream = null;
+    logger.info("Loading configuration file " + filePath);
+    instream = ClassLoader.getSystemClassLoader().getResourceAsStream(filePath);
+
+    if (instream == null) {
+      logger.info("Configuration file not present in classpath. File: " + filePath);
+      throw new RuntimeException("Unable to read " + filePath);
+    }
+    logger.info("Configuration file loaded. File: " + filePath);
+
+    Document document = null;
+    try {
+      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder builder = factory.newDocumentBuilder();
+      document = builder.parse(instream);
+    } catch (ParserConfigurationException e) {
+      throw new RuntimeException("XML Parser could not be created.", e);
+    } catch (SAXException e) {
+      throw new RuntimeException(filePath + " is not properly formed", e);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to read " + filePath, e);
+    }
-  public static String[][] parseCsvLines(String data) {
-    if (data.isEmpty()) {
-      return new String[0][];
+    return document;
+  }
+
+  /**
+   * Parse a java option string in the format of "-Dfoo=bar -Dfoo2=bar ..." into a {optionName -> optionValue} map.
+   *
+   * @param str The option string to parse
+   * @return A map of options
+   */
+  public static Map<String, String> parseJavaOptions(String str) {
+    Map<String, String> options = new HashMap<String, String>();
+    String[] tokens = str.trim().split("\\s");
+    for (String token : tokens) {
+      if (token.isEmpty() || token.startsWith("-X")) {
+        continue;
+      }
+      if (!token.startsWith("-D")) {
+        throw new IllegalArgumentException(
+            "Cannot parse java option string [" + str + "]. Some options do not begin with the -D prefix.");
+      }
+      String[] parts = token.substring(2).split("=", 2);
+      if (parts.length != 2) {
+        throw new IllegalArgumentException(
+            "Cannot parse java option string [" + str + "]. The part [" + token + "] does not contain a =.");
+      }
+
+      options.put(parts[0], parts[1]);
+    }
+    return options;
+  }
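A short sketch of how the two helpers above behave; the option string and application id are made-up examples (the id follows the standard YARN format), and the expected outputs follow from the code as committed:

import java.util.Map;
import com.linkedin.drelephant.util.Utils;

public class UtilsParsingExample {
  public static void main(String[] args) {
    // -X options are skipped; -D options are collected into the map
    Map<String, String> opts =
        Utils.parseJavaOptions("-Xmx4096m -Dmapreduce.map.memory.mb=2048 -Dqueue=default");
    System.out.println(opts.get("mapreduce.map.memory.mb")); // 2048
    System.out.println(opts.get("queue"));                   // default

    // "application" is rewritten to "job" in the id
    System.out.println(Utils.getJobIdFromApplicationId("application_1454898747582_0001"));
    // job_1454898747582_0001
  }
}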
+
+  /**
+   * Returns the configured thresholds after evaluating and verifying the levels.
+   *
+   * @param rawLimits A comma separated string of threshold limits
+   * @param thresholdLevels The number of threshold levels
+   * @return The evaluated threshold limits
+   */
+  public static double[] getParam(String rawLimits, int thresholdLevels) {
+    double[] parsedLimits = null;
+
+    if (rawLimits != null && !rawLimits.isEmpty()) {
+      String[] thresholds = rawLimits.split(",");
+      if (thresholds.length != thresholdLevels) {
+        logger.error("Could not find " + thresholdLevels + " threshold levels in " + rawLimits);
+        parsedLimits = null;
+      } else {
+        // Evaluate the limits
+        parsedLimits = new double[thresholdLevels];
+        ScriptEngineManager mgr = new ScriptEngineManager(null);
+        ScriptEngine engine = mgr.getEngineByName("JavaScript");
+        for (int i = 0; i < thresholdLevels; i++) {
+          try {
+            parsedLimits[i] = Double.parseDouble(engine.eval(thresholds[i]).toString());
+          } catch (ScriptException e) {
+            logger.error("Could not evaluate " + thresholds[i] + " in " + rawLimits);
+            parsedLimits = null;
+            // Stop evaluating once the array has been discarded; the next iteration would otherwise NPE
+            break;
+          }
+        }
+      }
+    }
+
+    return parsedLimits;
+  }
+
+  /**
+   * Combine the parts into a comma separated String. Null or empty parts after the first one are skipped.
+   *
+   * Example:
+   *   input: part1 = "foo" and part2 = "bar"
+   *   output = "foo,bar"
+   *
+   * @param parts The parts to combine
+   * @return The comma separated string
+   */
+  public static String commaSeparated(String... parts) {
+    StringBuilder sb = new StringBuilder();
+    String comma = ",";
+    if (parts.length != 0) {
+      sb.append(parts[0]);
+    }
+    for (int i = 1; i < parts.length; i++) {
+      if (parts[i] != null && !parts[i].isEmpty()) {
+        sb.append(comma);
+        sb.append(parts[i]);
+      }
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Compute the score for the heuristic based on the number of tasks and severity.
+   * This is applicable only to mapreduce applications.
+   *
+   * Score = severity * num of tasks (where severity NOT in [NONE, LOW])
+   *
+   * @param severity The heuristic severity
+   * @param tasks The number of tasks (map/reduce)
+   * @return The heuristic score
+   */
+  public static int getHeuristicScore(Severity severity, int tasks) {
+    int score = 0;
+    if (severity != Severity.NONE && severity != Severity.LOW) {
+      score = severity.getValue() * tasks;
+    }
+    return score;
+  }
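A sketch of the three helpers above. Note that threshold limits may be arithmetic expressions, since each entry is run through the JVM's JavaScript engine. The final line assumes Severity.CRITICAL carries the numeric value 4 in the Severity enum; that value is an assumption, not something shown in this patch:

import java.util.Arrays;
import com.linkedin.drelephant.analysis.Severity;
import com.linkedin.drelephant.util.Utils;

public class ThresholdExample {
  public static void main(String[] args) {
    // Each comma separated entry is evaluated as a JavaScript expression
    double[] limits = Utils.getParam("2, 4, 8, 1024*1024", 4);
    System.out.println(Arrays.toString(limits)); // [2.0, 4.0, 8.0, 1048576.0]

    // Empty parts after the first are skipped
    System.out.println(Utils.commaSeparated("foo", "bar", "")); // foo,bar

    // Score = severity value * number of tasks, e.g. 400 if CRITICAL maps to 4
    System.out.println(Utils.getHeuristicScore(Severity.CRITICAL, 100));
  }
}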
+
+  /**
+   * Parse a comma separated string of key-value pairs into a {property -> value} Map.
+   * e.g. string format: "foo1=bar1,foo2=bar2,foo3=bar3..."
+   *
+   * @param str The comma separated, key-value pair string to parse
+   * @return A map of properties
+   */
+  public static Map<String, String> parseCsKeyValue(String str) {
+    Map<String, String> properties = new HashMap<String, String>();
+    if (str == null) {
+      // Guard against iterating over a null token array below
+      return properties;
+    }
+    String[] tokens = str.trim().split(",");
+    for (String token : tokens) {
+      if (!token.isEmpty()) {
+        String[] parts = token.split("=", 2);
+        if (parts.length == 2) {
+          properties.put(parts[0], parts[1]);
+        }
+      }
+    }
+    return properties;
+  }
+
+  /**
+   * Truncate the field to the specified limit
+   *
+   * @param field the field to be truncated
+   * @param limit the truncation limit
+   * @param context additional context for logging purposes
+   * @return The truncated field
+   */
+  public static String truncateField(String field, int limit, String context) {
+    if (field != null && limit > TRUNCATE_SUFFIX.length() && field.length() > limit) {
+      logger.info("Truncating " + field + " to " + limit + " characters for " + context);
+      field = field.substring(0, limit - TRUNCATE_SUFFIX.length()) + TRUNCATE_SUFFIX;
+    }
+    return field;
+  }
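A quick round trip through the key-value parser and the truncation helper above; the context label is a hypothetical string used only for logging:

import java.util.Map;
import com.linkedin.drelephant.util.Utils;

public class KeyValueExample {
  public static void main(String[] args) {
    Map<String, String> props = Utils.parseCsKeyValue("foo1=bar1,foo2=bar2");
    System.out.println(props.get("foo2")); // bar2

    // 23 characters cut down to 10: seven characters plus the three character "..." suffix
    System.out.println(Utils.truncateField("a-very-long-field-value", 10, "example-context"));
    // a-very-...
  }
}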
+
+  /**
+   * Convert a millisecond duration to a string format
+   *
+   * @param millis A duration to convert to a string form
+   * @return A string of the form "H:MM:SS"
+   */
+  public static String getDurationBreakdown(long millis) {
+
+    long hours = TimeUnit.MILLISECONDS.toHours(millis);
+    millis -= TimeUnit.HOURS.toMillis(hours);
+    long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
+    millis -= TimeUnit.MINUTES.toMillis(minutes);
+    long seconds = TimeUnit.MILLISECONDS.toSeconds(millis);
+
+    return String.format("%d:%02d:%02d", hours, minutes, seconds);
+  }
+
+  /**
+   * Convert a value in MB-seconds to GB-hours
+   * @param MBSeconds The value to convert
+   * @return A double of the value in GB-hours
+   */
+  public static double MBSecondsToGBHours(long MBSeconds) {
+    double GBseconds = (double) MBSeconds / (double) FileUtils.ONE_KB;
+    double GBHours = GBseconds / Statistics.HOUR;
+    return GBHours;
+  }
+
+  /**
+   * Convert a value in MB-seconds to GB-hours
+   * @param MBSeconds The value to convert
+   * @return A string of the form "1.234 GB Hours"
+   */
+  public static String getResourceInGBHours(long MBSeconds) {
+
+    if (MBSeconds == 0) {
+      return "0 GB Hours";
+    }
+
+    double GBHours = MBSecondsToGBHours(MBSeconds);
+    if ((long) (GBHours * 1000) == 0) {
+      return "0 GB Hours";
+    }
+
+    DecimalFormat df = new DecimalFormat("0.000");
+    String GBHoursString = df.format(GBHours);
+    GBHoursString = GBHoursString + " GB Hours";
+    return GBHoursString;
+  }
+
+  /**
+   * Find the percentage of the numerator with respect to the denominator
+   * @param numerator The numerator
+   * @param denominator The denominator
+   * @return The percentage string of the form "x.yz %"
+   */
+  public static String getPercentage(long numerator, long denominator) {
+
+    if (denominator == 0) {
+      return "NaN";
+    }
+
+    double percentage = ((double) numerator / (double) denominator) * 100;
+
+    if ((long) (percentage) == 0) {
+      return "0 %";
+    }
+
+    DecimalFormat df = new DecimalFormat("0.00");
+    return df.format(percentage).concat(" %");
+  }
+
+  /**
+   * Checks if the property is set
+   *
+   * @param property The property to be checked.
+   * @return true if set, false otherwise
+   */
+  public static boolean isSet(String property) {
+    return property != null && !property.isEmpty();
+  }
+
+  /**
+   * Get non negative int value from Configuration.
+   *
+   * If the value is not set or not an integer, the provided default value is returned.
+   * If the value is negative, 0 is returned.
+   *
+   * @param conf Configuration to be extracted
+   * @param key property name
+   * @param defaultValue default value
+   * @return non negative int value
+   */
+  public static int getNonNegativeInt(Configuration conf, String key, int defaultValue) {
+    try {
+      int value = conf.getInt(key, defaultValue);
+      if (value < 0) {
+        value = 0;
+        logger.warn("Configuration " + key + " is negative. Resetting it to 0");
+      }
+      return value;
+    } catch (NumberFormatException e) {
+      logger.error("Invalid configuration " + key + ". Value is " + conf.get(key)
+          + ". Resetting it to default value: " + defaultValue);
+      return defaultValue;
+    }
+  }
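A sketch of the clamping behavior of getNonNegativeInt and the duration formatter above; the configuration key is a placeholder, and Configuration is the Hadoop class already imported by this file:

import org.apache.hadoop.conf.Configuration;
import com.linkedin.drelephant.util.Utils;

public class ConfigExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("some.thread.count", "-5"); // hypothetical key

    System.out.println(Utils.getNonNegativeInt(conf, "some.thread.count", 1)); // 0 (negative, clamped)
    System.out.println(Utils.getNonNegativeInt(conf, "missing.key", 1));       // 1 (unset, default used)

    // 3,725,000 ms is 1 hour, 2 minutes and 5 seconds
    System.out.println(Utils.getDurationBreakdown(3725000L)); // 1:02:05
  }
}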
+
+  /**
+   * Get non negative long value from Configuration.
+   *
+   * If the value is not set or not a long, the provided default value is returned.
+   * If the value is negative, 0 is returned.
+   *
+   * @param conf Configuration to be extracted
+   * @param key property name
+   * @param defaultValue default value
+   * @return non negative long value
+   */
+  public static long getNonNegativeLong(Configuration conf, String key, long defaultValue) {
+    try {
+      long value = conf.getLong(key, defaultValue);
+      if (value < 0) {
+        value = 0;
+        logger.warn("Configuration " + key + " is negative. Resetting it to 0");
+      }
+      return value;
+    } catch (NumberFormatException e) {
+      logger.error("Invalid configuration " + key + ". Value is " + conf.get(key)
+          + ". Resetting it to default value: " + defaultValue);
+      return defaultValue;
+    }
+  }
+
+  /**
+   * Return the formatted string, unless one of the args is null, in which case null is returned
+   *
+   * @param formatString the standard Java format string
+   * @param args objects to put in the format string
+   * @return formatted String or null
+   */
+  public static String formatStringOrNull(String formatString, Object... args) {
+    for (Object o : args) {
+      if (o == null) {
+        return null;
+      }
+    }
+    return String.format(formatString, args);
+  }
+
+  /**
+   * Given a configuration element, extract the params map.
+   *
+   * @param confElem the configuration element
+   * @return the params map or an empty map if one can't be found
+   */
+  public static Map<String, String> getConfigurationParameters(Element confElem) {
+    Map<String, String> paramsMap = new HashMap<String, String>();
+    Node paramsNode = confElem.getElementsByTagName("params").item(0);
+    if (paramsNode != null) {
+      NodeList paramsList = paramsNode.getChildNodes();
+      for (int j = 0; j < paramsList.getLength(); j++) {
+        Node paramNode = paramsList.item(j);
+        if (paramNode != null && !paramsMap.containsKey(paramNode.getNodeName())) {
+          paramsMap.put(paramNode.getNodeName(), paramNode.getTextContent());
+        }
+      }
+    }
+    return paramsMap;
+  }
+
+  /**
+   * Returns the total resources used by the job list
+   * @param resultList The job list
+   * @return The total resources used by the job list
+   */
+  public static long getTotalResources(List<AppResult> resultList) {
+    long totalResources = 0;
+    for (AppResult result : resultList) {
+      totalResources += result.resourceUsed;
+    }
+    return totalResources;
+  }
+
+  /**
+   * Returns the total wasted resources of the job list
+   * @param resultList The list of the jobs
+   * @return The total wasted resources of the job list
+   */
+  public static long getTotalWastedResources(List<AppResult> resultList) {
+    long totalWastedResources = 0;
+    for (AppResult result : resultList) {
+      totalWastedResources += result.resourceWasted;
+    }
+    return totalWastedResources;
+  }
+
+  /**
+   * Returns the total runtime of the job list, i.e. the finish time of the last job minus the start
+   * time of the first job
+   * @param mrJobsList The list of jobs
+   * @return The total runtime of the job list
+   */
+  public static long getTotalRuntime(List<AppResult> mrJobsList) {
+    long lastFinished = 0;
+    long firstStarted = Long.MAX_VALUE;
+
+    for (AppResult result : mrJobsList) {
+      if (result.finishTime > lastFinished) {
+        lastFinished = result.finishTime;
+      }
+      if (result.startTime < firstStarted) {
+        firstStarted = result.startTime;
+      }
+    }
+
+    return lastFinished - firstStarted;
+  }
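A sketch of the aggregation helpers above over two back-to-back executions; it assumes AppResult exposes the public long fields used in those methods (startTime, finishTime, resourceUsed) and a default constructor, which is how the Ebean model is accessed throughout this patch:

import java.util.Arrays;
import java.util.List;
import com.linkedin.drelephant.util.Utils;
import models.AppResult;

public class AggregateExample {
  public static void main(String[] args) {
    AppResult first = new AppResult();
    first.startTime = 0L;
    first.finishTime = 10000L;
    first.resourceUsed = 2048L; // MB-seconds

    AppResult second = new AppResult();
    second.startTime = 12000L;
    second.finishTime = 30000L;
    second.resourceUsed = 4096L;

    List<AppResult> jobs = Arrays.asList(first, second);
    System.out.println(Utils.getTotalResources(jobs)); // 6144
    System.out.println(Utils.getTotalRuntime(jobs));   // 30000 (last finish - first start)
  }
}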
+
+  /**
+   * Returns the total wait time of the job list. The total wait time is calculated by first finding
+   * the longest trail of non overlapping jobs which includes the last finished job. Then we add the delay for
+   * all the jobs in the trail and the difference in start and finish time between subsequent jobs of the
+   * trail.
+   * @param mrJobsList The job list
+   * @return The total wait time of the job list
+   */
+  public static long getTotalWaittime(List<AppResult> mrJobsList) {
+    long totalWaittime = 0;
+
+    if (mrJobsList.size() == 1) {
+      return mrJobsList.get(0).totalDelay;
+    }
+
+    List<AppResult> finishedTimesSorted = new ArrayList<AppResult>(mrJobsList);
+
+    // sort the jobs in reverse order of finish times.
+    Collections.sort(finishedTimesSorted, new Comparator<AppResult>() {
+      @Override
+      public int compare(AppResult a, AppResult b) {
+        // Compare the longs directly; casting their difference to int could overflow
+        return Long.compare(b.finishTime, a.finishTime);
+      }
+    });
+
+    // add the delay of the last finished job
+    totalWaittime += finishedTimesSorted.get(0).totalDelay;
+
+    for (int i = 1; i < finishedTimesSorted.size(); i++) {
+      if (finishedTimesSorted.get(i).finishTime < finishedTimesSorted.get(i - 1).startTime) {
+        // add the gap between the finish time of the current job and the start time of the job that finished after it
+        totalWaittime += finishedTimesSorted.get(i - 1).startTime - finishedTimesSorted.get(i).finishTime;
+        // add the delay in the current job
+        totalWaittime += finishedTimesSorted.get(i).totalDelay;
+      }
+    }
+    return totalWaittime;
+  }
+}
diff --git a/app/com/linkedin/drelephant/util/YarnURLUtils.java b/app/com/linkedin/drelephant/util/YarnURLUtils.java
new file mode 100644
index 000000000..1c34a96ac
--- /dev/null
+++ b/app/com/linkedin/drelephant/util/YarnURLUtils.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.util;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+
+
+/**
+ * This class provides URLs for the YARN APIs.
+ */
+public class YarnURLUtils {
+  private static final String MAPREDUCE_JOBS_PATH = "/ws/v1/history/mapreduce/jobs";
+  private static final String JOB_DETAIL_PATH = "/jobhistory/job";
+
+  public static String getMapreduceJobsURLString(String historyServerRoot) {
+    // Use the path constant instead of duplicating the literal
+    return "http://" + historyServerRoot + MAPREDUCE_JOBS_PATH;
+  }
+
+  public static URL getMapreduceJobsURL(String historyServerRoot, long startTime, long endTime)
+      throws MalformedURLException {
+    return new URL(String.format("%s?finishedTimeBegin=%s&finishedTimeEnd=%s&state=SUCCEEDED",
+        getMapreduceJobsURLString(historyServerRoot), startTime, endTime));
+  }
+
+  public static String getJobDetailURLString(String historyServerRoot, String jobId) {
+    return historyServerRoot + JOB_DETAIL_PATH + "/" + jobId;
+  }
+
+  public static URL getJobConfigURL(String historyServerRoot, String jobId)
+      throws MalformedURLException {
+    return new URL(getMapreduceJobsURLString(historyServerRoot) + "/" + jobId + "/conf");
+  }
+
+  public static URL getJobCounterURL(String historyServerRoot, String jobId)
+      throws MalformedURLException {
+    return new URL(getMapreduceJobsURLString(historyServerRoot) + "/" + jobId + "/counters");
+  }
+
+  public static URL getTaskListURL(String historyServerRoot, String jobId)
+      throws MalformedURLException {
+    return new URL(getMapreduceJobsURLString(historyServerRoot) + "/" + jobId + "/tasks");
+  }
+
+  public static URL getTaskCounterURL(String historyServerRoot, String jobId, String taskId)
+      throws MalformedURLException {
+    return new URL(getMapreduceJobsURLString(historyServerRoot) + "/" + jobId + "/tasks/" + taskId + "/counters");
+  }
+
+  public static URL getTaskAttemptURL(String historyServerRoot, String jobId, String taskId, String attemptId)
+      throws MalformedURLException
{ + return new URL( + getMapreduceJobsURLString(historyServerRoot) + "/" + jobId + "/tasks/" + taskId + "/attempts/" + attemptId); + } +} diff --git a/app/controllers/Application.java b/app/controllers/Application.java index 86be3d406..7da12d1c9 100644 --- a/app/controllers/Application.java +++ b/app/controllers/Application.java @@ -1,260 +1,1563 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package controllers; +import com.avaje.ebean.ExpressionList; +import com.avaje.ebean.Query; + +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.linkedin.drelephant.ElephantContext; +import com.linkedin.drelephant.analysis.Metrics; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.util.Utils; + import java.text.ParseException; import java.text.SimpleDateFormat; +import java.util.*; import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; +import java.util.Comparator; import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import models.AppHeuristicResult; +import models.AppResult; -import model.JobResult; -import views.html.*; - +import org.apache.commons.collections.map.ListOrderedMap; +import org.apache.http.client.utils.URLEncodedUtils; +import org.apache.http.message.BasicNameValuePair; +import org.apache.log4j.Logger; import play.api.templates.Html; import play.data.DynamicForm; import play.data.Form; import play.libs.Json; import play.mvc.Controller; import play.mvc.Result; +import views.html.help.metrics.helpRuntime; +import views.html.help.metrics.helpWaittime; +import views.html.help.metrics.helpUsedResources; +import views.html.help.metrics.helpWastedResources; +import views.html.index; +import views.html.page.comparePage; +import views.html.page.flowHistoryPage; +import views.html.page.helpPage; +import views.html.page.homePage; +import views.html.page.jobHistoryPage; +import views.html.page.searchPage; +import views.html.results.compareResults; +import views.html.results.flowDetails; +import views.html.results.oldFlowHistoryResults; +import views.html.results.jobDetails; +import views.html.results.oldJobHistoryResults; +import views.html.results.oldFlowMetricsHistoryResults; +import views.html.results.oldJobMetricsHistoryResults; +import views.html.results.searchResults; + +import views.html.page.oldFlowHistoryPage; +import views.html.page.oldJobHistoryPage; +import views.html.results.jobHistoryResults; +import views.html.results.flowHistoryResults; +import views.html.results.flowMetricsHistoryResults; +import views.html.results.jobMetricsHistoryResults; +import views.html.page.oldHelpPage; + +import com.google.gson.*; -import com.avaje.ebean.ExpressionList; -import com.linkedin.drelephant.ElephantAnalyser; -import com.linkedin.drelephant.analysis.Severity; -import 
com.linkedin.drelephant.analysis.heuristics.MapperDataSkewHeuristic; -import com.linkedin.drelephant.analysis.heuristics.MapperInputSizeHeuristic; -import com.linkedin.drelephant.analysis.heuristics.MapperSpeedHeuristic; -import com.linkedin.drelephant.analysis.heuristics.ReducerDataSkewHeuristic; -import com.linkedin.drelephant.analysis.heuristics.ReducerTimeHeuristic; -import com.linkedin.drelephant.analysis.heuristics.ShuffleSortHeuristic; public class Application extends Controller { + private static final Logger logger = Logger.getLogger(Application.class); private static final long DAY = 24 * 60 * 60 * 1000; private static final long FETCH_DELAY = 60 * 1000; - private static long lastFetch = 0; - private static int numJobsAnalyzed = 0; - private static int numJobsCritical = 0; - private static int numJobsSevere = 0; + private static final int PAGE_LENGTH = 20; // Num of jobs in a search page + private static final int PAGE_BAR_LENGTH = 5; // Num of pages shown in the page bar + private static final int REST_PAGE_LENGTH = 100; // Num of jobs in a rest search page + private static final int JOB_HISTORY_LIMIT = 5000; // Set to avoid memory error. + private static final int MAX_HISTORY_LIMIT = 15; // Upper limit on the number of executions to display + private static final int STAGE_LIMIT = 25; // Upper limit on the number of stages to display + + // Form and Rest parameters + public static final String APP_ID = "id"; + public static final String FLOW_DEF_ID = "flow-def-id"; + public static final String FLOW_EXEC_ID = "flow-exec-id"; + public static final String JOB_DEF_ID = "job-def-id"; + public static final String USERNAME = "username"; + public static final String QUEUE_NAME = "queue-name"; + public static final String SEVERITY = "severity"; + public static final String JOB_TYPE = "job-type"; + public static final String ANALYSIS = "analysis"; + public static final String STARTED_TIME_BEGIN = "started-time-begin"; + public static final String STARTED_TIME_END = "started-time-end"; + public static final String FINISHED_TIME_BEGIN = "finished-time-begin"; + public static final String FINISHED_TIME_END = "finished-time-end"; + public static final String COMPARE_FLOW_ID1 = "flow-exec-id1"; + public static final String COMPARE_FLOW_ID2 = "flow-exec-id2"; + public static final String PAGE = "page"; + + private enum Version {OLD,NEW}; + + // Configuration properties + private static final String SEARCH_MATCHES_PARTIAL_CONF = "drelephant.application.search.match.partial"; + + private static long _lastFetch = 0; + private static int _numJobsAnalyzed = 0; + private static int _numJobsCritical = 0; + private static int _numJobsSevere = 0; + + + /** + * Serves the initial index.html page for the new user interface. This page contains the whole web app + */ + public static Result serveAsset(String path) { + return ok(index.render()); + } + + /** + * Controls the Home page of Dr. Elephant. + * + * Displays the latest jobs which were analysed in the last 24 hours. 
+ */ + public static Result dashboard() { + long now = System.currentTimeMillis(); + long finishDate = now - DAY; + + // Update statistics only after FETCH_DELAY + if (now - _lastFetch > FETCH_DELAY) { + _numJobsAnalyzed = AppResult.find.where().gt(AppResult.TABLE.FINISH_TIME, finishDate).findRowCount(); + _numJobsCritical = AppResult.find.where() + .gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.CRITICAL.getValue()) + .findRowCount(); + _numJobsSevere = AppResult.find.where() + .gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.SEVERE.getValue()) + .findRowCount(); + _lastFetch = now; + } + + // Fetch only required fields for jobs analysed in the last 24 hours up to a max of 50 jobs + List results = AppResult.find.select(AppResult.getSearchFields()) + .where() + .gt(AppResult.TABLE.FINISH_TIME, finishDate) + .order() + .desc(AppResult.TABLE.FINISH_TIME) + .setMaxRows(50) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()) + .findList(); + + return ok(homePage.render(_numJobsAnalyzed, _numJobsSevere, _numJobsCritical, + searchResults.render("Latest analysis", results))); + } + + /** + * Returns the scheduler info id/url pair for the most recent app result that has an id like value + * (which can use % and _ SQL wild cards) for the specified field. Note that this is a pair rather + * than merely an ID/URL because for some schedulers (e.g. Airflow) they are not equivalent and + * usually the UI wants to display the ID with a link to the URL. While it is true that the URL + * can probably be derived from the ID in most cases, we would need scheduler specific logic which + * would be a mess. + */ + private static IdUrlPair bestSchedulerInfoMatchLikeValue(String value, String schedulerIdField) { + String schedulerUrlField; + if (schedulerIdField.equals(AppResult.TABLE.FLOW_DEF_ID)) { + schedulerUrlField = AppResult.TABLE.FLOW_DEF_URL; + } else if (schedulerIdField.equals(AppResult.TABLE.FLOW_EXEC_ID)) { + schedulerUrlField = AppResult.TABLE.FLOW_EXEC_URL; + } else if (schedulerIdField.equals(AppResult.TABLE.JOB_DEF_ID)) { + schedulerUrlField = AppResult.TABLE.JOB_DEF_URL; + } else if (schedulerIdField.equals(AppResult.TABLE.JOB_EXEC_ID)) { + schedulerUrlField = AppResult.TABLE.JOB_EXEC_URL; + } else { + throw new RuntimeException(String.format("%s is not a valid scheduler info id field", schedulerIdField)); + } + AppResult result = AppResult.find + .select(String.format("%s, %s", schedulerIdField, schedulerUrlField)) + .where().like(schedulerIdField, value) + .order() + .desc(AppResult.TABLE.FINISH_TIME) + .setMaxRows(1) + .findUnique(); + if (result != null) { + if (schedulerIdField.equals(AppResult.TABLE.FLOW_DEF_ID)) { + return new IdUrlPair(result.flowDefId, result.flowDefUrl); + } else if (schedulerIdField.equals(AppResult.TABLE.FLOW_EXEC_ID)) { + return new IdUrlPair(result.flowExecId, result.flowExecUrl); + } else if (schedulerIdField.equals(AppResult.TABLE.JOB_DEF_ID)) { + return new IdUrlPair(result.jobDefId, result.jobDefUrl); + } else if (schedulerIdField.equals(AppResult.TABLE.JOB_EXEC_ID)) { + return new IdUrlPair(result.jobExecId, result.jobExecUrl); + } + } + return null; + } + + /** + * Given a (possibly) partial scheduler info id, try to find the closest existing id. 
+   */
+  private static IdUrlPair bestSchedulerInfoMatchGivenPartialId(String partialSchedulerInfoId, String schedulerInfoIdField) {
+    IdUrlPair schedulerInfoPair;
+    // check for an exact match
+    schedulerInfoPair = bestSchedulerInfoMatchLikeValue(partialSchedulerInfoId, schedulerInfoIdField);
+    // check for a prefix match (the partial id followed by anything) if the feature isn't disabled
+    if (schedulerInfoPair == null && ElephantContext.instance().getGeneralConf().getBoolean(SEARCH_MATCHES_PARTIAL_CONF, true)) {
+      schedulerInfoPair = bestSchedulerInfoMatchLikeValue(String.format("%s%%", partialSchedulerInfoId), schedulerInfoIdField);
+    }
+    // if we didn't find anything, just give a best guess
+    if (schedulerInfoPair == null) {
+      schedulerInfoPair = new IdUrlPair(partialSchedulerInfoId, "");
+    }
+    return schedulerInfoPair;
+  }
+
+  /**
+   * Controls the Search Feature
+   */
   public static Result search() {
     DynamicForm form = Form.form().bindFromRequest(request());
-    String jobId = form.get("jobid");
-    String username = form.get("user");
-    String severity = form.get("severity");
-    String jobtype = form.get("jobtype");
-    String analysis = form.get("analysis");
-    String dateStart = form.get("start-date");
-    String dateEnd = form.get("end-date");
-    SimpleDateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy");
-    if (jobId != null && !jobId.isEmpty()) {
-      JobResult result = JobResult.find.byId(jobId);
-      if (result != null) {
-        return ok(search.render(singlejob.render(result)));
-      } else {
-        return ok(search.render(singlejob.render(null)));
+    String appId = form.get(APP_ID);
+    appId = appId != null ? appId.trim() : "";
+    if (appId.contains("job")) {
+      appId = appId.replaceAll("job", "application");
+    }
+    String partialFlowExecId = form.get(FLOW_EXEC_ID);
+    partialFlowExecId = (partialFlowExecId != null) ? partialFlowExecId.trim() : null;
+
+    // Search and display job details when a job id or flow execution url is provided.
+    if (!appId.isEmpty()) {
+      AppResult result = AppResult.find.select("*")
+          .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*")
+          .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS,
+              "*")
+          .where()
+          .idEq(appId).findUnique();
+      return ok(searchPage.render(null, jobDetails.render(result)));
+    } else if (Utils.isSet(partialFlowExecId)) {
+      IdUrlPair flowExecPair = bestSchedulerInfoMatchGivenPartialId(partialFlowExecId, AppResult.TABLE.FLOW_EXEC_ID);
+      List<AppResult> results = AppResult.find
+          .select(AppResult.getSearchFields() + "," + AppResult.TABLE.JOB_EXEC_ID)
+          .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields())
+          .where()
+          .eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecPair.getId())
+          .findList();
+      Map<IdUrlPair, List<AppResult>> map = ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.JOB_EXECUTION_ID);
+      return ok(searchPage.render(null, flowDetails.render(flowExecPair, map)));
+    }
+
+    // Prepare pagination of results
+    PaginationStats paginationStats = new PaginationStats(PAGE_LENGTH, PAGE_BAR_LENGTH);
+    int pageLength = paginationStats.getPageLength();
+    paginationStats.setCurrentPage(1);
+    final Map<String, String[]> searchString = request().queryString();
+    if (searchString.containsKey(PAGE)) {
+      try {
+        paginationStats.setCurrentPage(Integer.parseInt(searchString.get(PAGE)[0]));
+      } catch (NumberFormatException ex) {
+        logger.error("Error parsing page number.
Setting current page to 1."); + paginationStats.setCurrentPage(1); } + } + int currentPage = paginationStats.getCurrentPage(); + int paginationBarStartIndex = paginationStats.getPaginationBarStartIndex(); + + // Filter jobs by search parameters + Query query = generateSearchQuery(AppResult.getSearchFields(), getSearchParams()); + List results = query.setFirstRow((paginationBarStartIndex - 1) * pageLength) + .setMaxRows((paginationStats.getPageBarLength() - 1) * pageLength + 1) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()) + .findList(); + paginationStats.setQueryString(getQueryString()); + if (results.isEmpty() || currentPage > paginationStats.computePaginationBarEndIndex(results.size())) { + return ok(searchPage.render(null, jobDetails.render(null))); } else { - ExpressionList query = JobResult.find.where(); - if (username != null && !username.isEmpty()) { - query = query.ilike("username", username); + List resultsToDisplay = results.subList((currentPage - paginationBarStartIndex) * pageLength, + Math.min(results.size(), (currentPage - paginationBarStartIndex + 1) * pageLength)); + return ok(searchPage.render(paginationStats, searchResults.render( + String.format("Results: Showing %,d of %,d", resultsToDisplay.size(), query.findRowCount()), resultsToDisplay))); + } + } + + /** + * Parses the request for the queryString + * + * @return URL Encoded String of Parameter Value Pair + */ + public static String getQueryString() { + List fields = new LinkedList(); + final Set> entries = request().queryString().entrySet(); + for (Map.Entry entry : entries) { + final String key = entry.getKey(); + final String value = entry.getValue()[0]; + if (!key.equals(PAGE)) { + fields.add(new BasicNameValuePair(key, value)); } - if (jobtype != null && !jobtype.isEmpty()) { - query = query.eq("job_type", jobtype); + } + if (fields.isEmpty()) { + return null; + } else { + return URLEncodedUtils.format(fields, "utf-8"); + } + } + + public static Map getSearchParams() { + Map searchParams = new HashMap(); + + DynamicForm form = Form.form().bindFromRequest(request()); + String username = form.get(USERNAME); + username = username != null ? username.trim().toLowerCase() : null; + searchParams.put(USERNAME, username); + String queuename = form.get(QUEUE_NAME); + queuename = queuename != null ? 
queuename.trim().toLowerCase() : null; + searchParams.put(QUEUE_NAME, queuename); + searchParams.put(SEVERITY, form.get(SEVERITY)); + searchParams.put(JOB_TYPE, form.get(JOB_TYPE)); + searchParams.put(ANALYSIS, form.get(ANALYSIS)); + searchParams.put(FINISHED_TIME_BEGIN, form.get(FINISHED_TIME_BEGIN)); + searchParams.put(FINISHED_TIME_END, form.get(FINISHED_TIME_END)); + searchParams.put(STARTED_TIME_BEGIN, form.get(STARTED_TIME_BEGIN)); + searchParams.put(STARTED_TIME_END, form.get(STARTED_TIME_END)); + + return searchParams; + } + + /** + * Build SQL predicates for Search Query + * + * @param selectParams The fields to select from the table + * @param searchParams The fields to query on the table + * @return An sql expression on App Result + */ + public static Query generateSearchQuery(String selectParams, Map searchParams) { + if (searchParams == null || searchParams.isEmpty()) { + return AppResult.find.select(selectParams).order().desc(AppResult.TABLE.FINISH_TIME); + } + ExpressionList query = AppResult.find.select(selectParams).where(); + + // Build predicates + String username = searchParams.get(USERNAME); + if (Utils.isSet(username)) { + query = query.eq(AppResult.TABLE.USERNAME, username); + } + + String queuename = searchParams.get(QUEUE_NAME); + if (Utils.isSet(queuename)) { + query = query.eq(AppResult.TABLE.QUEUE_NAME, queuename); + } + String jobType = searchParams.get(JOB_TYPE); + if (Utils.isSet(jobType)) { + query = query.eq(AppResult.TABLE.JOB_TYPE, jobType); + } + String severity = searchParams.get(SEVERITY); + if (Utils.isSet(severity)) { + String analysis = searchParams.get(ANALYSIS); + if (Utils.isSet(analysis)) { + query = + query.eq(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.HEURISTIC_NAME, analysis) + .ge(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.SEVERITY, severity); + } else { + query = query.ge(AppResult.TABLE.SEVERITY, severity); } - if (severity != null && !severity.isEmpty()) { - query = query.ge("heuristicResults.severity", severity); + } + + // Time Predicates. 
Both the startedTimeBegin and startedTimeEnd are inclusive in the filter + String startedTimeBegin = searchParams.get(STARTED_TIME_BEGIN); + if (Utils.isSet(startedTimeBegin)) { + long time = parseTime(startedTimeBegin); + if (time > 0) { + query = query.ge(AppResult.TABLE.START_TIME, time); } - if (analysis != null && !analysis.isEmpty()) { - query = query.eq("heuristicResults.analysisName", analysis); + } + String startedTimeEnd = searchParams.get(STARTED_TIME_END); + if (Utils.isSet(startedTimeEnd)) { + long time = parseTime(startedTimeEnd); + if (time > 0) { + query = query.le(AppResult.TABLE.START_TIME, time); } - if (dateStart != null && !dateStart.isEmpty()) { - try { - Date date = dateFormat.parse(dateStart); - query = query.gt("startTime", date.getTime()); - } catch (ParseException e) { - e.printStackTrace(); - } + } + + String finishedTimeBegin = searchParams.get(FINISHED_TIME_BEGIN); + if (Utils.isSet(finishedTimeBegin)) { + long time = parseTime(finishedTimeBegin); + if (time > 0) { + query = query.ge(AppResult.TABLE.FINISH_TIME, time); + } + } + String finishedTimeEnd = searchParams.get(FINISHED_TIME_END); + if (Utils.isSet(finishedTimeEnd)) { + long time = parseTime(finishedTimeEnd); + if (time > 0) { + query = query.le(AppResult.TABLE.FINISH_TIME, time); } - if (dateEnd != null && !dateEnd.isEmpty()) { - try { - Date date = dateFormat.parse(dateEnd); - Calendar c = Calendar.getInstance(); - c.setTime(date); - c.add(Calendar.DATE, 1); - date = c.getTime(); - query = query.lt("startTime", date.getTime()); - } catch (ParseException e) { - e.printStackTrace(); + } + + // If queried by start time then sort the results by start time. + if (Utils.isSet(startedTimeBegin) || Utils.isSet(startedTimeEnd)) { + return query.order().desc(AppResult.TABLE.START_TIME); + } else { + return query.order().desc(AppResult.TABLE.FINISH_TIME); + } + } + + /** + Controls the Compare Feature + */ + public static Result compare() { + DynamicForm form = Form.form().bindFromRequest(request()); + String partialFlowExecId1 = form.get(COMPARE_FLOW_ID1); + partialFlowExecId1 = (partialFlowExecId1 != null) ? partialFlowExecId1.trim() : null; + String partialFlowExecId2 = form.get(COMPARE_FLOW_ID2); + partialFlowExecId2 = (partialFlowExecId2 != null) ? 
partialFlowExecId2.trim() : null; + + List results1 = null; + List results2 = null; + if (partialFlowExecId1 != null && !partialFlowExecId1.isEmpty() && partialFlowExecId2 != null && !partialFlowExecId2.isEmpty()) { + IdUrlPair flowExecIdPair1 = bestSchedulerInfoMatchGivenPartialId(partialFlowExecId1, AppResult.TABLE.FLOW_EXEC_ID); + IdUrlPair flowExecIdPair2 = bestSchedulerInfoMatchGivenPartialId(partialFlowExecId2, AppResult.TABLE.FLOW_EXEC_ID); + results1 = AppResult.find + .select(AppResult.getSearchFields() + "," + AppResult.TABLE.JOB_DEF_ID + "," + AppResult.TABLE.JOB_DEF_URL + + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL) + .where().eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecIdPair1.getId()).setMaxRows(100) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()) + .findList(); + results2 = AppResult.find + .select( + AppResult.getSearchFields() + "," + AppResult.TABLE.JOB_DEF_ID + "," + AppResult.TABLE.JOB_DEF_URL + "," + + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL) + .where().eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecIdPair2.getId()).setMaxRows(100) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()) + .findList(); + } + return ok(comparePage.render(compareResults.render(compareFlows(results1, results2)))); + } + + /** + * Helper Method for the compare controller. + * This Compares 2 flow executions at job level. + * + * @param results1 The list of jobs under flow execution 1 + * @param results2 The list of jobs under flow execution 2 + * @return A map of Job Urls to the list of jobs corresponding to the 2 flow execution urls + */ + private static Map>> compareFlows(List results1, List results2) { + + Map>> jobDefMap = new HashMap>>(); + + if (results1 != null && !results1.isEmpty() && results2 != null && !results2.isEmpty()) { + + IdUrlPair flow1 = new IdUrlPair(results1.get(0).flowExecId, results1.get(0).flowExecUrl); + IdUrlPair flow2 = new IdUrlPair(results2.get(0).flowExecId, results2.get(0).flowExecUrl); + + Map> map1 = ControllerUtil.groupJobs(results1, ControllerUtil.GroupBy.JOB_DEFINITION_ID); + Map> map2 = ControllerUtil.groupJobs(results2, ControllerUtil.GroupBy.JOB_DEFINITION_ID); + + final Set group1 = new TreeSet(new Comparator() { + public int compare(final IdUrlPair o1, final IdUrlPair o2) { + return o1.getId().compareToIgnoreCase(o2.getId()); + } + }); + group1.addAll(map1.keySet()); + final Set group2 = new TreeSet(new Comparator() { + public int compare(final IdUrlPair o1, final IdUrlPair o2) { + return o1.getId().compareToIgnoreCase(o2.getId()); } + }); + group2.addAll(map2.keySet()); + + // Display jobs that are common to the two flows first followed by jobs in flow 1 and flow 2. 
+ Set CommonJobs = Sets.intersection(group1, group2); + Set orderedFlowSet = Sets.union(CommonJobs, group1); + Set union = Sets.union(orderedFlowSet, group2); + + for (IdUrlPair pair : union) { + Map> flowExecMap = new LinkedHashMap>(); + flowExecMap.put(flow1, map1.get(pair)); + flowExecMap.put(flow2, map2.get(pair)); + jobDefMap.put(pair, flowExecMap); } - List results = - query.order().desc("analysisTime").setMaxRows(50) - .fetch("heuristicResults").findList(); - return ok(search.render(multijob.render("Results", results))); } + return jobDefMap; } - public static Result dashboard(int page) { - long now = System.currentTimeMillis(); - if (now - lastFetch > FETCH_DELAY) { - numJobsAnalyzed = - JobResult.find.where().gt("analysisTime", now - DAY).findRowCount(); - numJobsCritical = - JobResult.find.where().gt("analysisTime", now - DAY) - .eq("severity", Severity.CRITICAL.getValue()).findRowCount(); - numJobsSevere = - JobResult.find.where().gt("analysisTime", now - DAY) - .eq("severity", Severity.SEVERE.getValue()).findRowCount(); - lastFetch = now; - } - List results = - JobResult.find.where().gt("analysisTime", now - DAY).order() - .desc("analysisTime").setMaxRows(50).fetch("heuristicResults") - .findList(); + /** + * Returns the new version of flow history + */ + public static Result flowHistory() { + return getFlowHistory(Version.NEW); + } - return ok(index.render(numJobsAnalyzed, numJobsSevere, numJobsCritical, - multijob.render("Latest analysis", results))); + /** + * Returns the old version of flow history + */ + public static Result oldFlowHistory() { + return getFlowHistory(Version.OLD); } - public static Result help() { + /** + * Returns the flowHistory based on the version provided + * + * @param version Can be either new or old + * @return The flowhistory page based on the version provided + */ + private static Result getFlowHistory(Version version) { DynamicForm form = Form.form().bindFromRequest(request()); - String topic = form.get("topic"); + String partialFlowDefId = form.get(FLOW_DEF_ID); + partialFlowDefId = (partialFlowDefId != null) ? 
partialFlowDefId.trim() : null; - Html page = null; - String title = "Help"; + boolean hasSparkJob = false; - if (topic != null && !topic.isEmpty()) { - if (topic.equals(MapperDataSkewHeuristic.heuristicName)) { - page = helpMapperDataSkew.render(); - } else if (topic.equals(ReducerDataSkewHeuristic.heuristicName)) { - page = helpReducerDataSkew.render(); - } else if (topic.equals(MapperInputSizeHeuristic.heuristicName)) { - page = helpMapperInputSize.render(); - } else if (topic.equals(MapperSpeedHeuristic.heuristicName)) { - page = helpMapperSpeed.render(); - } else if (topic.equals(ReducerTimeHeuristic.heuristicName)) { - page = helpReducerTime.render(); - } else if (topic.equals(ShuffleSortHeuristic.heuristicName)) { - page = helpShuffleSort.render(); - } else if (topic.equals(ElephantAnalyser.NO_DATA)) { - page = helpNoData.render(); + String graphType = form.get("select-graph-type"); + + // get the graph type + if (graphType == null) { + graphType = "resources"; + } + + if (!Utils.isSet(partialFlowDefId)) { + if (version.equals(Version.NEW)) { + return ok(flowHistoryPage + .render(partialFlowDefId, graphType, flowHistoryResults.render(null, null, null, null))); + } else { + return ok( + oldFlowHistoryPage.render(partialFlowDefId, graphType, oldFlowHistoryResults.render(null, null, null, null))); } - if (page != null) { - title = topic; + } + + IdUrlPair flowDefPair = bestSchedulerInfoMatchGivenPartialId(partialFlowDefId, AppResult.TABLE.FLOW_DEF_ID); + + List results; + + if (graphType.equals("time") || graphType.equals("resources")) { + + // if graph type is time or resources, we don't need the result from APP_HEURISTIC_RESULTS + results = AppResult.find.select( + AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL + "," + + AppResult.TABLE.JOB_DEF_ID + "," + AppResult.TABLE.JOB_DEF_URL + "," + AppResult.TABLE.JOB_NAME) + .where() + .eq(AppResult.TABLE.FLOW_DEF_ID, flowDefPair.getId()) + .order() + .desc(AppResult.TABLE.FINISH_TIME) + .setMaxRows(JOB_HISTORY_LIMIT) + .findList(); + } else { + + // Fetch available flow executions with latest JOB_HISTORY_LIMIT mr jobs. 
+ results = AppResult.find.select( + AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL + "," + + AppResult.TABLE.JOB_DEF_ID + "," + AppResult.TABLE.JOB_DEF_URL + "," + AppResult.TABLE.JOB_NAME) + .where() + .eq(AppResult.TABLE.FLOW_DEF_ID, flowDefPair.getId()) + .order() + .desc(AppResult.TABLE.FINISH_TIME) + .setMaxRows(JOB_HISTORY_LIMIT) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()) + .findList(); + } + if (results.size() == 0) { + return notFound("Unable to find record for flow def id: " + flowDefPair.getId()); + } + + for (AppResult result : results) { + if (result.jobType.equals("Spark")) { + hasSparkJob = true; } } - return ok(help.render(title, page)); + Map> flowExecIdToJobsMap = ControllerUtil + .limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), + results.size(), MAX_HISTORY_LIMIT); + + // Compute flow execution data + List filteredResults = new ArrayList(); // All jobs starting from latest execution + List flowExecTimeList = new ArrayList(); // To map executions to resp execution time + Map>> executionMap = + new LinkedHashMap>>(); + for (Map.Entry> entry : flowExecIdToJobsMap.entrySet()) { + + // Reverse the list content from desc order of finish time to increasing order so that when grouping we get + // the job list in the order of completion. + List mrJobsList = Lists.reverse(entry.getValue()); + + // Flow exec time is the finish time of the last mr job in the flow + flowExecTimeList.add(mrJobsList.get(mrJobsList.size() - 1).finishTime); + + filteredResults.addAll(mrJobsList); + executionMap.put(entry.getKey(), ControllerUtil.groupJobs(mrJobsList, ControllerUtil.GroupBy.JOB_DEFINITION_ID)); + } + + // Calculate unique list of jobs (job def url) to maintain order across executions. List will contain job def urls + // from latest execution first followed by any other extra job def url that may appear in previous executions. + final Map idPairToJobNameMap = new ListOrderedMap() ; + + Map> filteredTempMap = + ControllerUtil.groupJobs(filteredResults, ControllerUtil.GroupBy.JOB_DEFINITION_ID); + + List>> filteredMapList = + new LinkedList>>( filteredTempMap.entrySet() ); + + Collections.sort(filteredMapList, new Comparator>>() { + @Override + public int compare(Map.Entry> idUrlPairListMap, Map.Entry> t1) { + return ( new Long(idUrlPairListMap.getValue().get(0).finishTime)).compareTo(t1.getValue().get(0).finishTime); + } + }); + + + for (Map.Entry> entry : filteredMapList) { + idPairToJobNameMap.put(entry.getKey(), entry.getValue().get(0).jobName); + } + + if (version.equals(Version.NEW)) { + if (graphType.equals("heuristics")) { + return ok(flowHistoryPage.render(flowDefPair.getId(), graphType, + flowHistoryResults.render(flowDefPair, executionMap, idPairToJobNameMap, flowExecTimeList))); + } else if (graphType.equals("resources") || graphType.equals("time")) { + return ok(flowHistoryPage.render(flowDefPair.getId(), graphType, flowMetricsHistoryResults + .render(flowDefPair, graphType, executionMap, idPairToJobNameMap, flowExecTimeList))); + } + } else { + if (graphType.equals("heuristics")) { + return ok(oldFlowHistoryPage.render(flowDefPair.getId(), graphType, + oldFlowHistoryResults.render(flowDefPair, executionMap, idPairToJobNameMap, flowExecTimeList))); + } else if (graphType.equals("resources") || graphType.equals("time")) { + if (hasSparkJob) { + return notFound("Cannot plot graph for " + graphType + " since it contains a spark job. 
" + graphType + + " graphs are not supported for spark right now"); + } else { + return ok(oldFlowHistoryPage.render(flowDefPair.getId(), graphType, oldFlowMetricsHistoryResults + .render(flowDefPair, graphType, executionMap, idPairToJobNameMap, flowExecTimeList))); + } + } + } + return notFound("Unable to find graph type: " + graphType); + } + + /** + * Controls Job History. Displays at max MAX_HISTORY_LIMIT executions. Old version of the job history + */ + public static Result oldJobHistory() { + return getJobHistory(Version.OLD); + } + + /** + * Controls Job History. Displays at max MAX_HISTORY_LIMIT executions. New version of the job history + */ + public static Result jobHistory() { + return getJobHistory(Version.NEW); } /** - * A listing of all MR jobs from historic executions of the same job + * Returns the job history. Returns at max MAX_HISTORY_LIMIT executions. + * + * @param version The version of job history to return + * @return The job history page based on the version. */ - public static Result allJobExecs() { + private static Result getJobHistory(Version version) { + DynamicForm form = Form.form().bindFromRequest(request()); + String partialJobDefId = form.get(JOB_DEF_ID); + partialJobDefId = (partialJobDefId != null) ? partialJobDefId.trim() : null; + + boolean hasSparkJob = false; + // get the graph type + String graphType = form.get("select-graph-type"); + + if (graphType == null) { + graphType = "resources"; + } - String jobUrl = request().queryString().get("job")[0]; - List results = JobResult.find.where().eq("job_url", jobUrl).findList(); + if (!Utils.isSet(partialJobDefId)) { + if (version.equals(Version.NEW)) { + return ok( + jobHistoryPage.render(partialJobDefId, graphType, jobHistoryResults.render(null, null, -1, null))); + } else { + return ok(oldJobHistoryPage.render(partialJobDefId, graphType, oldJobHistoryResults.render(null, null, -1, null))); + } + } + IdUrlPair jobDefPair = bestSchedulerInfoMatchGivenPartialId(partialJobDefId, AppResult.TABLE.JOB_DEF_ID); + + List results; + + if (graphType.equals("time") || graphType.equals("resources")) { + // we don't need APP_HEURISTIC_RESULT_DETAILS data to plot for time and resources + results = AppResult.find.select( + AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL) + .where() + .eq(AppResult.TABLE.JOB_DEF_ID, jobDefPair.getId()) + .order() + .desc(AppResult.TABLE.FINISH_TIME) + .setMaxRows(JOB_HISTORY_LIMIT) + .findList(); + } else { + // Fetch all job executions + results = AppResult.find.select( + AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL) + .where() + .eq(AppResult.TABLE.JOB_DEF_ID, jobDefPair.getId()) + .order() + .desc(AppResult.TABLE.FINISH_TIME) + .setMaxRows(JOB_HISTORY_LIMIT) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." 
+ AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*")
+          .findList();
+    }
+
+    for (AppResult result : results) {
+      if (result.jobType.equals("Spark")) {
+        hasSparkJob = true;
+      }
+    }

     if (results.size() == 0) {
-      return notFound("Unable to find record on job definition url: " + jobUrl);
+      return notFound("Unable to find record for job def id: " + jobDefPair.getId());
     }
+    Map<IdUrlPair, List<AppResult>> flowExecIdToJobsMap = ControllerUtil
+        .limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID),
+            results.size(), MAX_HISTORY_LIMIT);
+
+    // Compute job execution data
+    List<Long> flowExecTimeList = new ArrayList<Long>();
+    int maxStages = 0;
+    Map<IdUrlPair, List<AppResult>> executionMap = new LinkedHashMap<IdUrlPair, List<AppResult>>();
+    for (Map.Entry<IdUrlPair, List<AppResult>> entry : flowExecIdToJobsMap.entrySet()) {
+
+      // Reverse the list content from descending order of finish time to increasing order so that when
+      // grouping we get the job list in the order of completion.
+      List<AppResult> mrJobsList = Lists.reverse(entry.getValue());

-    Map<String, List<JobResult>> map = groupJobsByExec(results);
-    return ok(related.render(jobUrl, map));
+      // Get the finish time of the last mr job that completed in the current flow.
+      flowExecTimeList.add(mrJobsList.get(mrJobsList.size() - 1).finishTime);
+
+      // Find the maximum number of mr stages for any job execution
+      int stageSize = flowExecIdToJobsMap.get(entry.getKey()).size();
+      if (stageSize > maxStages) {
+        maxStages = stageSize;
+      }
+
+      executionMap.put(entry.getKey(), Lists.reverse(flowExecIdToJobsMap.get(entry.getKey())));
+    }
+    if (maxStages > STAGE_LIMIT) {
+      maxStages = STAGE_LIMIT;
+    }
+    if (version.equals(Version.NEW)) {
+      if (graphType.equals("heuristics")) {
+        return ok(jobHistoryPage.render(jobDefPair.getId(), graphType,
+            jobHistoryResults.render(jobDefPair, executionMap, maxStages, flowExecTimeList)));
+      } else if (graphType.equals("resources") || graphType.equals("time")) {
+        return ok(jobHistoryPage.render(jobDefPair.getId(), graphType,
+            jobMetricsHistoryResults.render(jobDefPair, graphType, executionMap, maxStages, flowExecTimeList)));
+      }
+    } else {
+      if (graphType.equals("heuristics")) {
+        return ok(oldJobHistoryPage.render(jobDefPair.getId(), graphType,
+            oldJobHistoryResults.render(jobDefPair, executionMap, maxStages, flowExecTimeList)));
+      } else if (graphType.equals("resources") || graphType.equals("time")) {
+        if (hasSparkJob) {
+          return notFound("Resource and time graphs are not supported for Spark right now");
+        } else {
+          return ok(oldJobHistoryPage.render(jobDefPair.getId(), graphType,
+              oldJobMetricsHistoryResults.render(jobDefPair, graphType, executionMap, maxStages, flowExecTimeList)));
+        }
+      }
+    }
+    return notFound("Unable to find graph type: " + graphType);
   }

   /**
-   * A listing of all other jobs that were found from the same flow execution.
+   * Returns the help page based on the version
+   *
+   * @param version The version for which the help page has to be returned
+   * @return The help page based on the version
    */
-  public static Result flowRelated() {
+  private static Result getHelp(Version version) {
+    DynamicForm form = Form.form().bindFromRequest(request());
+    String topic = form.get("topic");
+    Html page = null;
+    String title = "Help";
+    if (topic != null && !topic.isEmpty()) {
+      // check if it is a heuristic help
+      page = ElephantContext.instance().getHeuristicToView().get(topic);

-    String execUrl = request().queryString().get("flowexec")[0];
-    List<JobResult> results = JobResult.find.where().eq("flow_exec_url", execUrl).findList();
+      // check if it is a metrics help
+      if (page == null) {
+        page = getMetricsNameView().get(topic);
+      }

-    if (results.size() == 0) {
-      return notFound("Unable to find record on flow exec: " + execUrl);
+      if (page != null) {
+        title = topic;
+      }
     }

-    Map<String, List<JobResult>> map = groupJobsByExec(results);
-    return ok(related.render(execUrl, map));
+    if (version.equals(Version.NEW)) {
+      return ok(helpPage.render(title, page));
+    }
+    return ok(oldHelpPage.render(title, page));
   }

-  public static Result restJobResult(String jobId) {
+  /**
+   * Controls the old Help Page
+   */
+  public static Result oldHelp() {
+    return getHelp(Version.OLD);
+  }

-    if (jobId == null || jobId.isEmpty()) {
-      return badRequest("No job id provided.");
+  /**
+   * Controls the new Help Page
+   */
+  public static Result help() {
+    return getHelp(Version.NEW);
+  }
+
+  private static Map<String, Html> getMetricsNameView() {
+    Map<String, Html> metricsViewMap = new HashMap<String, Html>();
+    metricsViewMap.put(Metrics.RUNTIME.getText(), helpRuntime.render());
+    metricsViewMap.put(Metrics.WAIT_TIME.getText(), helpWaittime.render());
+    metricsViewMap.put(Metrics.USED_RESOURCES.getText(), helpUsedResources.render());
+    metricsViewMap.put(Metrics.WASTED_RESOURCES.getText(), helpWastedResources.render());
+    return metricsViewMap;
+  }
+
+  /**
+   * Parses the given string into epoch time as a long
+   *
+   * @param time The string to be parsed
+   * @return the epoch value, or 0 if the string cannot be parsed
+   */
+  private static long parseTime(String time) {
+    long unixTime = 0;
+    try {
+      unixTime = Long.parseLong(time);
+    } catch (NumberFormatException ex) {
+      // return 0
     }
+    return unixTime;
+  }

-    JobResult result = JobResult.find.byId(jobId);
+  /**
+   * Rest API for fetching information about a particular job
+   * E.g., localhost:8080/rest/job?id=xyz
+   */
+  public static Result restAppResult(String id) {

-    if (result == null) {
-      return notFound("Unable to find record on job id: " + jobId);
+    if (id == null || id.isEmpty()) {
+      return badRequest("No job id provided.");
+    }
+    if (id.contains("job")) {
+      id = id.replaceAll("job", "application");
     }

-    return ok(Json.toJson(result));
+    AppResult result = AppResult.find.select("*")
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*")
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "."
+            + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*")
+        .where()
+        .idEq(id)
+        .findUnique();
+
+    if (result != null) {
+      return ok(Json.toJson(result));
+    } else {
+      return notFound("Unable to find record on id: " + id);
+    }
   }

-  public static Result restJobExecResult(String jobExecUrl) {
+  /**
+   * Rest API for searching all jobs triggered by a particular Scheduler Job
+   * E.g., localhost:8080/rest/jobexec?id=xyz
+   */
+  public static Result restJobExecResult(String jobExecId) {

-    if (jobExecUrl == null || jobExecUrl.isEmpty()) {
-      return badRequest("No job exec url provided.");
+    if (jobExecId == null || jobExecId.isEmpty()) {
+      return badRequest("No job exec id provided.");
     }

-    List<JobResult> result = JobResult.find.where().eq("job_exec_url", jobExecUrl).findList();
+    List<AppResult> result = AppResult.find.select("*")
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*")
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "."
+            + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*")
+        .where()
+        .eq(AppResult.TABLE.JOB_EXEC_ID, jobExecId)
+        .findList();

     if (result.size() == 0) {
-      return notFound("Unable to find record on job exec url: " + jobExecUrl);
+      return notFound("Unable to find record on job exec id: " + jobExecId);
     }

     return ok(Json.toJson(result));
   }

-  public static Result restFlowExecResult(String flowExecUrl) {
+  /**
+   * Rest API for searching all jobs under a particular flow execution
+   * E.g., localhost:8080/rest/flowexec?id=xyz
+   */
+  public static Result restFlowExecResult(String flowExecId) {

-    if (flowExecUrl == null || flowExecUrl.isEmpty()) {
-      return badRequest("No flow exec url provided.");
+    if (flowExecId == null || flowExecId.isEmpty()) {
+      return badRequest("No flow exec id provided.");
     }

-    List<JobResult> results = JobResult.find.where().eq("flow_exec_url", flowExecUrl).findList();
+    List<AppResult> results = AppResult.find.select("*")
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*")
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "."
+            + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*")
+        .where()
+        .eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecId)
+        .findList();

     if (results.size() == 0) {
-      return notFound("Unable to find record on flow exec url: " + flowExecUrl);
+      return notFound("Unable to find record on flow exec id: " + flowExecId);
     }

-    Map<String, List<JobResult>> resMap = groupJobsByExec(results);
+    Map<IdUrlPair, List<AppResult>> groupMap = ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.JOB_EXECUTION_ID);
+
+    Map<String, List<AppResult>> resMap = new HashMap<String, List<AppResult>>();
+    for (Map.Entry<IdUrlPair, List<AppResult>> entry : groupMap.entrySet()) {
+      IdUrlPair jobExecPair = entry.getKey();
+      List<AppResult> value = entry.getValue();
+      resMap.put(jobExecPair.getId(), value);
+    }
     return ok(Json.toJson(resMap));
   }

-  private static Map<String, List<JobResult>> groupJobsByExec(List<JobResult> results) {
-    Map<String, List<JobResult>> resultMap = new HashMap<String, List<JobResult>>();
-    for (JobResult result : results) {
-      String field = result.jobExecUrl;
-      if (resultMap.containsKey(field)) {
-        resultMap.get(field).add(result);
+
+  /**
+   * The Rest API for the Search Feature
+   *
+   * E.g., http://localhost:8080/rest/search?username=abc&job-type=HadoopJava
+   */
+  public static Result restSearch() {
+    DynamicForm form = Form.form().bindFromRequest(request());
+    String appId = form.get(APP_ID);
+    appId = appId != null ? appId.trim() : "";
+    if (appId.contains("job")) {
+      appId = appId.replaceAll("job", "application");
+    }
+    String flowExecId = form.get(FLOW_EXEC_ID);
+    flowExecId = (flowExecId != null) ? flowExecId.trim() : null;
+    if (!appId.isEmpty()) {
+      AppResult result = AppResult.find.select("*")
+          .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*")
+          .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "."
+ AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, + "*") + .where() + .idEq(appId) + .findUnique(); + if (result != null) { + return ok(Json.toJson(result)); } else { - List list = new ArrayList(); - list.add(result); - resultMap.put(field, list); + return notFound("Unable to find record on id: " + appId); + } + } else if (flowExecId != null && !flowExecId.isEmpty()) { + List results = AppResult.find.select("*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, + "*") + .where() + .eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecId) + .findList(); + if (results.size() == 0) { + return notFound("Unable to find record on flow execution: " + flowExecId); + } else { + return ok(Json.toJson(results)); + } + } + + int page = 1; + if (request().queryString().containsKey(PAGE)) { + page = Integer.parseInt(request().queryString().get(PAGE)[0]); + if (page <= 0) { + page = 1; } } - return resultMap; - } - public static Result testEmail() { + Query query = generateSearchQuery("*", getSearchParams()); + List results = query.setFirstRow((page - 1) * REST_PAGE_LENGTH) + .setMaxRows(REST_PAGE_LENGTH) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*") + .findList(); + + if (results.size() == 0) { + return notFound("No records"); + } else { + return ok(Json.toJson(results)); + } + } + /** + * The Rest API for Compare Feature + * E.g., localhost:8080/rest/compare?flow-exec-id1=abc&flow-exec-id2=xyz + */ + public static Result restCompare() { DynamicForm form = Form.form().bindFromRequest(request()); - String jobId = form.get("jobid"); - if (jobId != null && !jobId.isEmpty()) { - JobResult result = JobResult.find.byId(jobId); - if (result != null) { - return ok(emailcritical.render(result)); + String flowExecId1 = form.get(COMPARE_FLOW_ID1); + flowExecId1 = (flowExecId1 != null) ? flowExecId1.trim() : null; + String flowExecId2 = form.get(COMPARE_FLOW_ID2); + flowExecId2 = (flowExecId2 != null) ? flowExecId2.trim() : null; + + List results1 = null; + List results2 = null; + if (flowExecId1 != null && !flowExecId1.isEmpty() && flowExecId2 != null && !flowExecId2.isEmpty()) { + results1 = AppResult.find.select("*") + .where() + .eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecId1) + .setMaxRows(100) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, + "*") + .findList(); + results2 = AppResult.find.select("*") + .where() + .eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecId2) + .setMaxRows(100) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." 
+ AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, + "*") + .findList(); + } + + Map>> compareResults = compareFlows(results1, results2); + + Map>> resMap = new HashMap>>(); + for (Map.Entry>> entry : compareResults.entrySet()) { + IdUrlPair jobExecPair = entry.getKey(); + Map> value = entry.getValue(); + for (Map.Entry> innerEntry : value.entrySet()) { + IdUrlPair flowExecPair = innerEntry.getKey(); + List results = innerEntry.getValue(); + Map> resultMap = new HashMap>(); + resultMap.put(flowExecPair.getId(), results); + resMap.put(jobExecPair.getId(), resultMap); } } - return notFound(); + + return ok(Json.toJson(resMap)); + } + + /** + * The data for plotting the flow history graph + * + *
+   * {@code
+   *   [
+   *     {
+   *       "flowtime": <Last job's finish time>,
+   *       "score": 1000,
+   *       "jobscores": [
+   *         {
+   *           "jobdefurl": "url",
+   *           "jobexecurl": "url",
+   *           "jobscore": 500
+   *         },
+   *         {
+   *           "jobdefurl": "url",
+   *           "jobexecurl": "url",
+   *           "jobscore": 500
+   *         }
+   *       ]
+   *     },
+   *     {
+   *       "flowtime": <Last job's finish time>,
+   *       "score": 700,
+   *       "jobscores": [
+   *         {
+   *           "jobdefurl": "url",
+   *           "jobexecurl": "url",
+   *           "jobscore": 0
+   *         },
+   *         {
+   *           "jobdefurl": "url",
+   *           "jobexecurl": "url",
+   *           "jobscore": 700
+   *         }
+   *       ]
+   *     }
+   *   ]
+   * }
+   * </pre>
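+   *
+   * Example request, assuming the route wired to this action in conf/routes:
+   * E.g., localhost:8080/rest/flowgraphdata?id=xyz
+   *
+   * A minimal sketch of consuming this response with Gson; {@code responseBody} is an
+   * illustrative variable holding the response text and is not part of this patch:
+   * <pre>
+   * {@code
+   *   JsonArray datasets = new JsonParser().parse(responseBody).getAsJsonArray();
+   *   for (JsonElement element : datasets) {
+   *     JsonObject execution = element.getAsJsonObject();
+   *     long flowtime = execution.get("flowtime").getAsLong();
+   *     int score = execution.get("score").getAsInt();
+   *     JsonArray jobScores = execution.getAsJsonArray("jobscores");
+   *   }
+   * }
+   * </pre>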
+ */ + public static Result restFlowGraphData(String flowDefId) { + JsonArray datasets = new JsonArray(); + if (flowDefId == null || flowDefId.isEmpty()) { + return ok(new Gson().toJson(datasets)); + } + + // Fetch available flow executions with latest JOB_HISTORY_LIMIT mr jobs. + List results = getRestFlowAppResults(flowDefId); + + if (results.size() == 0) { + logger.info("No results for Job url"); + } + Map> flowExecIdToJobsMap = + ControllerUtil.limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), results.size(), MAX_HISTORY_LIMIT); + + // Compute the graph data starting from the earliest available execution to latest + List keyList = new ArrayList(flowExecIdToJobsMap.keySet()); + for (int i = keyList.size() - 1; i >= 0; i--) { + IdUrlPair flowExecPair = keyList.get(i); + int flowPerfScore = 0; + JsonArray jobScores = new JsonArray(); + List mrJobsList = Lists.reverse(flowExecIdToJobsMap.get(flowExecPair)); + Map> jobDefIdToJobsMap = ControllerUtil.groupJobs(mrJobsList, ControllerUtil.GroupBy.JOB_DEFINITION_ID); + + // Compute the execution records. Note that each entry in the jobDefIdToJobsMap will have at least one AppResult + for (IdUrlPair jobDefPair : jobDefIdToJobsMap.keySet()) { + // Compute job perf score + int jobPerfScore = 0; + for (AppResult job : jobDefIdToJobsMap.get(jobDefPair)) { + jobPerfScore += job.score; + } + + // A job in jobscores list + JsonObject jobScore = new JsonObject(); + jobScore.addProperty("jobscore", jobPerfScore); + jobScore.addProperty("jobdefurl", jobDefPair.getUrl()); + jobScore.addProperty("jobexecurl", jobDefIdToJobsMap.get(jobDefPair).get(0).jobExecUrl); + + jobScores.add(jobScore); + flowPerfScore += jobPerfScore; + } + + // Execution record + JsonObject dataset = new JsonObject(); + dataset.addProperty("flowtime", mrJobsList.get(mrJobsList.size() - 1).finishTime); + dataset.addProperty("score", flowPerfScore); + dataset.add("jobscores", jobScores); + + datasets.add(dataset); + } + + return ok(new Gson().toJson(datasets)); + } + + /** + * The data for plotting the job history graph. While plotting the job history + * graph an ajax call is made to this to fetch the graph data. + * + * Data Returned: + *
+   * {@code
+   *   [
+   *     {
+   *       "flowtime": <Last job's finish time>,
+   *       "score": 1000,
+   *       "stagescores": [
+   *         {
+   *           "stageid": "id",
+   *           "stagescore": 500
+   *         },
+   *         {
+   *           "stageid": "id",
+   *           "stagescore": 500
+   *         }
+   *       ]
+   *     },
+   *     {
+   *       "flowtime": <Last job's finish time>,
+   *       "score": 700,
+   *       "stagescores": [
+   *         {
+   *           "stageid": "id",
+   *           "stagescore": 0
+   *         },
+   *         {
+   *           "stageid": "id",
+   *           "stagescore": 700
+   *         }
+   *       ]
+   *     }
+   *   ]
+   * }
+   * </pre>
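+   *
+   * Example request, assuming the route wired to this action in conf/routes:
+   * E.g., localhost:8080/rest/jobgraphdata?id=xyz
+   *
+   * A minimal sketch of reading the per-stage scores with Gson; {@code responseBody}
+   * is an illustrative variable holding the response text, not part of this patch:
+   * <pre>
+   * {@code
+   *   JsonArray datasets = new JsonParser().parse(responseBody).getAsJsonArray();
+   *   for (JsonElement element : datasets) {
+   *     for (JsonElement stage : element.getAsJsonObject().getAsJsonArray("stagescores")) {
+   *       String stageId = stage.getAsJsonObject().get("stageid").getAsString();
+   *       int stageScore = stage.getAsJsonObject().get("stagescore").getAsInt();
+   *     }
+   *   }
+   * }
+   * </pre>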
+ */ + public static Result restJobGraphData(String jobDefId) { + JsonArray datasets = new JsonArray(); + if (jobDefId == null || jobDefId.isEmpty()) { + return ok(new Gson().toJson(datasets)); + } + + // Fetch available flow executions with latest JOB_HISTORY_LIMIT mr jobs. + List results = getRestJobAppResults(jobDefId); + + if (results.size() == 0) { + logger.info("No results for Job url"); + } + Map> flowExecIdToJobsMap = + ControllerUtil.limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), results.size(), MAX_HISTORY_LIMIT); + + // Compute the graph data starting from the earliest available execution to latest + List keyList = new ArrayList(flowExecIdToJobsMap.keySet()); + for (int i = keyList.size() - 1; i >= 0; i--) { + IdUrlPair flowExecPair = keyList.get(i); + int jobPerfScore = 0; + JsonArray stageScores = new JsonArray(); + List mrJobsList = Lists.reverse(flowExecIdToJobsMap.get(flowExecPair)); + for (AppResult appResult : flowExecIdToJobsMap.get(flowExecPair)) { + + // Each MR job triggered by jobDefId for flowExecId + int mrPerfScore = 0; + for (AppHeuristicResult appHeuristicResult : appResult.yarnAppHeuristicResults) { + mrPerfScore += appHeuristicResult.score; + } + + // A particular mr stage + JsonObject stageScore = new JsonObject(); + stageScore.addProperty("stageid", appResult.id); + stageScore.addProperty("stagescore", mrPerfScore); + + stageScores.add(stageScore); + jobPerfScore += mrPerfScore; + } + + // Execution record + JsonObject dataset = new JsonObject(); + dataset.addProperty("flowtime", mrJobsList.get(mrJobsList.size() - 1).finishTime); + dataset.addProperty("score", jobPerfScore); + dataset.add("stagescores", stageScores); + + datasets.add(dataset); + } + + return ok(new Gson().toJson(datasets)); + } + + /** + * The data for plotting the job history graph using time and resource metrics. While plotting the job history + * graph an ajax call is made to this to fetch the graph data. + * + * Data Returned: + *
+   * [
+   *  {
+   *    "flowtime": 1461234105456,
+   *    "runtime": 2312107,
+   *    "waittime": 118879,
+   *    "resourceused": 304934912,
+   *    "resourcewasted": 172913,
+   *    "jobmetrics": [
+   *      {
+   *        "stageid": "application_1458194917883_1587177",
+   *        "runtime": 642986,
+   *        "waittime": 14016,
+   *        "resourceused": 277352448,
+   *        "resourcewasted": 0
+   *      }]
+   *  },
+   *  {
+   *    "flowtime": 1461237538639,
+   *    "runtime": 2155354,
+   *    "waittime": 112187,
+   *    "resourceused": 293096448,
+   *    "resourcewasted": 400461,
+   *    "jobmetrics": [
+   *      {
+   *        "stageid": "application_1458194917883_1589302",
+   *        "runtime": 548924,
+   *        "waittime": 16903,
+   *        "resourceused": 266217472,
+   *        "resourcewasted": 0
+   *      }]
+   *  }
+   *  ]
+   *
+   * </pre>
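+   *
+   * Example request, assuming the route wired to this action in conf/routes:
+   * E.g., localhost:8080/rest/jobmetricsgraphdata?id=xyz
+   *
+   * The resource values are in MB-seconds, matching the aggregation done in this
+   * controller; a client can convert them to GB-hours the same way getUserResourceUsage
+   * does ({@code execution} is an illustrative JsonObject taken from the response array):
+   * <pre>
+   * {@code
+   *   long resourceUsedMbSeconds = execution.get("resourceused").getAsLong();
+   *   double resourceUsedGbHours = Utils.MBSecondsToGBHours(resourceUsedMbSeconds);
+   * }
+   * </pre>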
+ */ + public static Result restJobMetricsGraphData(String jobDefId) { + JsonArray datasets = new JsonArray(); + if (jobDefId == null || jobDefId.isEmpty()) { + return ok(new Gson().toJson(datasets)); + } + + List results = getRestJobAppResults(jobDefId); + + if (results.size() == 0) { + logger.info("No results for Job url"); + } + Map> flowExecIdToJobsMap = + ControllerUtil.limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), results.size(), MAX_HISTORY_LIMIT); + + // Compute the graph data starting from the earliest available execution to latest + List keyList = new ArrayList(flowExecIdToJobsMap.keySet()); + for (int i = keyList.size() - 1; i >= 0; i--) { + IdUrlPair flowExecPair = keyList.get(i); + int jobPerfScore = 0; + JsonArray stageMetrics = new JsonArray(); + List mrJobsList = Lists.reverse(flowExecIdToJobsMap.get(flowExecPair)); + + long totalMemoryUsed = 0; + long totalMemoryWasted = 0; + long totalDelay = 0; + + for (AppResult appResult : flowExecIdToJobsMap.get(flowExecPair)) { + + // Each MR job triggered by jobDefId for flowExecId + int mrPerfScore = 0; + + for (AppHeuristicResult appHeuristicResult : appResult.yarnAppHeuristicResults) { + mrPerfScore += appHeuristicResult.score; + } + + // A particular mr stage + JsonObject stageMetric = new JsonObject(); + stageMetric.addProperty("stageid", appResult.id); + stageMetric.addProperty("runtime", appResult.finishTime - appResult.startTime); + stageMetric.addProperty("waittime", appResult.totalDelay); + stageMetric.addProperty("resourceused", appResult.resourceUsed); + stageMetric.addProperty("resourcewasted", appResult.resourceWasted); + + stageMetrics.add(stageMetric); + jobPerfScore += mrPerfScore; + totalMemoryUsed += appResult.resourceUsed; + totalMemoryWasted += appResult.resourceWasted; + } + + // Execution record + JsonObject dataset = new JsonObject(); + dataset.addProperty("flowtime", mrJobsList.get(mrJobsList.size() - 1).finishTime); + dataset.addProperty("runtime", Utils.getTotalRuntime(mrJobsList)); + dataset.addProperty("waittime", Utils.getTotalWaittime(mrJobsList)); + dataset.addProperty("resourceused", totalMemoryUsed); + dataset.addProperty("resourcewasted", totalMemoryWasted); + dataset.add("jobmetrics", stageMetrics); + + datasets.add(dataset); + } + + return ok(new Gson().toJson(datasets)); + } + + /** + * + * @param startTime - beginning of the time window + * @param endTime - end of the time window + * @return Json of resourceUsage data for each user for the given time window + * eg. 
[{"user":"bmr","resourceUsed":168030208,"resourceWasted":27262750}, + * {"user":"payments","resourceUsed":18432,"resourceWasted":3447}, + * {"user":"myu","resourceUsed":558211072,"resourceWasted":81573818}] + */ + public static Result restResourceUsageDataByUser(String startTime, String endTime) { + try { + JsonArray datasets = new JsonArray(); + if(startTime.length() != endTime.length() || + (startTime.length() != 10 && startTime.length() != 13)) { + return status(300); + } + SimpleDateFormat tf = null ; + if( startTime.length() == 10 ) { + tf = new SimpleDateFormat("yyyy-MM-dd"); + } + else { + tf = new SimpleDateFormat("yyyy-MM-dd-HH"); + } + Date start = tf.parse(startTime); + Date end = tf.parse(endTime); + Collection result = getUserResourceUsage(start, end); + + return ok(new Gson().toJson(result)); + } + catch(ParseException ex) { + return status(300,"Invalid datetime format : " + ex.getMessage()); + } + } + + + /** + * Rest data to plot flot history graph using time and resource metrics. While plotting the flow history + * graph an ajax call is made to this to fetch the graph data. + * [ + * { + * "flowtime": 1461744881991, + * "runtime": 3190223, + * "waittime": 368011, + * "resourceused": 180488192, + * "resourcewasted": 0, + * "jobmetrics": [ + * { + * "runtime": 3190223, + * "waittime": 368011, + * "resourceused": 180488192, + * "resourcewasted": 0, + * "jobdefurl": "sampleURL" + * "jobexecurl": "sampleURL" + * } + * ] + * }, + * { + * "flowtime": 1461818409959, + * "runtime": 897490, + * "waittime": 100703, + * "resourceused": 12863488, + * "resourcewasted": 0, + * "jobmetrics": [ + * { + * "runtime": 897490, + * "waittime": 100703, + * "resourceused": 12863488, + * "resourcewasted": 0, + * "jobdefurl": "sampleURL" + * "jobexecurl": "sampleURL" + * } + * ] + *} + *] + **/ + public static Result restFlowMetricsGraphData(String flowDefId) { + JsonArray datasets = new JsonArray(); + if (flowDefId == null || flowDefId.isEmpty()) { + return ok(new Gson().toJson(datasets)); + } + + List results = getRestFlowAppResults(flowDefId); + + if (results.size() == 0) { + logger.info("No results for Job url"); + } + Map> flowExecIdToJobsMap = + ControllerUtil.limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), results.size(), MAX_HISTORY_LIMIT); + + // Compute the graph data starting from the earliest available execution to latest + List keyList = new ArrayList(flowExecIdToJobsMap.keySet()); + for (int i = keyList.size() - 1; i >= 0; i--) { + IdUrlPair flowExecPair = keyList.get(i); + int flowPerfScore = 0; + JsonArray jobScores = new JsonArray(); + List mrJobsList = Lists.reverse(flowExecIdToJobsMap.get(flowExecPair)); + Map> jobDefIdToJobsMap = ControllerUtil.groupJobs(mrJobsList, ControllerUtil.GroupBy.JOB_DEFINITION_ID); + + long totalFlowMemoryUsed = 0; + long totalFlowMemoryWasted = 0; + long totalFlowDelay = 0; + long totalFlowRuntime = 0; + // Compute the execution records. 
Note that each entry in the jobDefIdToJobsMap will have at least one AppResult
+      for (IdUrlPair jobDefPair : jobDefIdToJobsMap.keySet()) {
+        // Compute the job level metrics
+        long totalJobMemoryUsed = 0;
+        long totalJobMemoryWasted = 0;
+        long totalJobDelay = 0;
+        long totalJobRuntime = 0;
+
+        totalJobRuntime = Utils.getTotalRuntime(jobDefIdToJobsMap.get(jobDefPair));
+        totalJobDelay = Utils.getTotalWaittime(jobDefIdToJobsMap.get(jobDefPair));
+
+        for (AppResult job : jobDefIdToJobsMap.get(jobDefPair)) {
+          totalJobMemoryUsed += job.resourceUsed;
+          totalJobMemoryWasted += job.resourceWasted;
+        }
+
+        // A job in the jobmetrics list
+        JsonObject jobScore = new JsonObject();
+        jobScore.addProperty("runtime", totalJobRuntime);
+        jobScore.addProperty("waittime", totalJobDelay);
+        jobScore.addProperty("resourceused", totalJobMemoryUsed);
+        jobScore.addProperty("resourcewasted", totalJobMemoryWasted);
+        jobScore.addProperty("jobdefurl", jobDefPair.getUrl());
+        jobScore.addProperty("jobexecurl", jobDefIdToJobsMap.get(jobDefPair).get(0).jobExecUrl);
+
+        jobScores.add(jobScore);
+        totalFlowMemoryUsed += totalJobMemoryUsed;
+        totalFlowMemoryWasted += totalJobMemoryWasted;
+      }
+
+      totalFlowDelay = Utils.getTotalWaittime(flowExecIdToJobsMap.get(flowExecPair));
+      totalFlowRuntime = Utils.getTotalRuntime(flowExecIdToJobsMap.get(flowExecPair));
+
+      // Execution record
+      JsonObject dataset = new JsonObject();
+      dataset.addProperty("flowtime", mrJobsList.get(mrJobsList.size() - 1).finishTime);
+      dataset.addProperty("runtime", totalFlowRuntime);
+      dataset.addProperty("waittime", totalFlowDelay);
+      dataset.addProperty("resourceused", totalFlowMemoryUsed);
+      dataset.addProperty("resourcewasted", totalFlowMemoryWasted);
+      dataset.add("jobmetrics", jobScores);
+
+      datasets.add(dataset);
+    }
+
+    return ok(new Gson().toJson(datasets));
+  }
+
+  /**
+   * Returns a list of AppResults after querying the database by JOB_DEF_ID
+   *
+   * @param jobDefId The job definition id to query on
+   * @return The list of AppResults
+   */
+  private static List<AppResult> getRestJobAppResults(String jobDefId) {
+    List<AppResult> results = AppResult.find.select(
+        AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL)
+        .where()
+        .eq(AppResult.TABLE.JOB_DEF_ID, jobDefId)
+        .order()
+        .desc(AppResult.TABLE.FINISH_TIME)
+        .setMaxRows(JOB_HISTORY_LIMIT)
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*")
+        .findList();
+
+    return results;
+  }
+
+  /**
+   * Returns the list of AppResults after querying the database by FLOW_DEF_ID
+   *
+   * @param flowDefId The flow definition id to query on
+   * @return The list of AppResults
+   */
+  private static List<AppResult> getRestFlowAppResults(String flowDefId) {
+    // Fetch available flow executions with the latest JOB_HISTORY_LIMIT mr jobs.
+    List<AppResult> results = AppResult.find.select("*")
+        .where()
+        .eq(AppResult.TABLE.FLOW_DEF_ID, flowDefId)
+        .order()
+        .desc(AppResult.TABLE.FINISH_TIME)
+        .setMaxRows(JOB_HISTORY_LIMIT)
+        .findList();
+
+    return results;
+  }
+
+  private static class AppResourceUsageData {
+    String user;
+    double resourceUsed;
+    double resourceWasted;
+  }
+
+  /**
+   * Returns the list of users with their resourceUsed and resourceWasted data for the given time range
+   *
+   * @param start The beginning of the time window
+   * @param end The end of the time window
+   * @return list of AppResourceUsageData
+   */
+  private static Collection<AppResourceUsageData> getUserResourceUsage(Date start, Date end) {
+    Map<String, AppResourceUsageData> userResourceUsage = new HashMap<String, AppResourceUsageData>();
+    // Fetch all the AppResults for the given time range [startTime, endTime).
+ List results = AppResult.find.select("*") + .where() + .ge(AppResult.TABLE.START_TIME, start.getTime()) + .lt(AppResult.TABLE.START_TIME, end.getTime()).findList(); + + // aggregate the resourceUsage data at the user level + for (AppResult result : results) { + if (!userResourceUsage.containsKey(result.username)) { + AppResourceUsageData data = new AppResourceUsageData(); + data.user = result.username; + userResourceUsage.put(result.username, data); + } + userResourceUsage.get(result.username).resourceUsed += Utils.MBSecondsToGBHours(result.resourceUsed); + userResourceUsage.get(result.username).resourceWasted += Utils.MBSecondsToGBHours(result.resourceWasted); + } + + return userResourceUsage.values(); } } diff --git a/app/controllers/ControllerUtil.java b/app/controllers/ControllerUtil.java new file mode 100644 index 000000000..8f39193a4 --- /dev/null +++ b/app/controllers/ControllerUtil.java @@ -0,0 +1,140 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package controllers; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import models.AppResult; + + +public class ControllerUtil { + + private static final int JOB_HISTORY_LIMIT = 5000; + + public static enum GroupBy { + JOB_EXECUTION_ID, + JOB_DEFINITION_ID, + FLOW_EXECUTION_ID + } + /** + * Applies a limit on the number of executions to be displayed after trying to maximize the correctness. + * + * Correctness: + * When the number of jobs are less than the JOB_HISTORY_LIMIT, we can show all the executions correctly. However, + * when the number of jobs are greater than the JOB_HISTORY_LIMIT, we cannot simply prune the jobs at that point and + * show the history because we may skip some jobs which belong to the last flow execution. For the flow executions + * we display, we want to ensure we show all the jobs belonging to that flow. + * + * So, when the number of executions are less than 10, we skip the last execution and when the number of executions + * are greater than 10, we skip the last 3 executions just to maximise the correctness. + * + * @param map The results map to be pruned. + * @param size Total number of jobs in the map + * @param execLimit The upper limit on the number of executions to be displayed. + * @return A map after applying the limit. + */ + public static Map> limitHistoryResults(Map> map,int size, + int execLimit) { + + Map> resultMap = new LinkedHashMap>(); + + int limit; + if (size < JOB_HISTORY_LIMIT) { + // No pruning needed. 100% correct. + limit = execLimit; + } else { + Set keySet = map.keySet(); + if (keySet.size() > 10) { + // Prune last 3 executions + limit = keySet.size() > (execLimit + 3) ? 
execLimit : keySet.size() - 3; + } else { + // Prune the last execution + limit = keySet.size() - 1; + } + } + + // Filtered results + int i = 1; + for (Map.Entry> entry : map.entrySet()) { + if (i > limit) { + break; + } + resultMap.put(entry.getKey(), entry.getValue()); + i++; + } + + return resultMap; + } + + + /** + * Grouping a list of AppResult by GroupBy enum. + * + * @param results The list of jobs of type AppResult to be grouped. + * @param groupBy The field by which the results have to be grouped. + * @return A map with the grouped field as the key and the list of jobs as the value. + */ + public static Map> groupJobs(List results, GroupBy groupBy) { + Map> groupMap = new LinkedHashMap>(); + Map idUrlMap = new HashMap(); + + for (AppResult result : results) { + String idField = null; + String urlField = null; + switch (groupBy) { + case JOB_EXECUTION_ID: + idField = result.jobExecId; + urlField = result.jobExecUrl; + break; + case JOB_DEFINITION_ID: + idField = result.jobDefId; + urlField = result.jobDefUrl; + break; + case FLOW_EXECUTION_ID: + idField = result.flowExecId; + urlField = result.flowExecUrl; + break; + } + if (!idUrlMap.containsKey(idField)) { + idUrlMap.put(idField, urlField); + } + + if (groupMap.containsKey(idField)) { + groupMap.get(idField).add(result); + } else { + List list = new ArrayList(); + list.add(result); + groupMap.put(idField, list); + } + } + + // Construct the final result map with the key as a (id, url) pair. + Map> resultMap = new LinkedHashMap>(); + for (Map.Entry> entry : groupMap.entrySet()) { + String key = entry.getKey(); + List value = entry.getValue(); + resultMap.put(new IdUrlPair(key, idUrlMap.get(key)), value); + } + + return resultMap; + } + +} diff --git a/app/controllers/IdUrlPair.java b/app/controllers/IdUrlPair.java new file mode 100644 index 000000000..8e7f14cbf --- /dev/null +++ b/app/controllers/IdUrlPair.java @@ -0,0 +1,68 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package controllers; + + +public class IdUrlPair { + + public final String id; + public final String url; + + public IdUrlPair(String id, String url) { + this.id = id; + this.url = url; + } + + public String getId() { + return id; + } + + public String getUrl() { + return url; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 
0 : id.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final IdUrlPair other = (IdUrlPair) obj; + if (id == null) { + if (other.getId() != null) { + return false; + } + } else if (!id.equals(other.getId())) { + return false; + } + + return true; + } +} diff --git a/app/controllers/MetricsController.java b/app/controllers/MetricsController.java new file mode 100644 index 000000000..22818139c --- /dev/null +++ b/app/controllers/MetricsController.java @@ -0,0 +1,242 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package controllers; + +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Histogram; +import com.codahale.metrics.JmxReporter; +import com.codahale.metrics.Meter; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Timer; +import com.codahale.metrics.health.HealthCheckRegistry; +import com.codahale.metrics.health.jvm.ThreadDeadlockHealthCheck; +import com.codahale.metrics.jvm.MemoryUsageGaugeSet; +import com.linkedin.drelephant.analysis.AnalyticJob; +import com.linkedin.drelephant.metrics.CustomGarbageCollectorMetricSet; +import org.apache.log4j.Logger; + +import models.AppResult; +import play.Configuration; +import play.libs.Json; +import play.mvc.Controller; +import play.mvc.Result; + +import static com.codahale.metrics.MetricRegistry.name; + + +/** + * This class enables the use of Dropwizard + * metrics for the application. + * + *

+ * The following endpoints are exposed. + *
    /ping - Reports application status if up
+ *
    /healthcheck - Returns status in Json format from all the implemented healthchecks
+ *
    /metrics - Returns all the metrics in Json format
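+ *
+ * A minimal sketch of how analysis code can record timings once init() has run
+ * (the metric name here is illustrative, not one registered by this patch):
+ * <pre>
+ * {@code
+ *   Timer.Context context = MetricsController.startTimer("jobAnalysisTime");
+ *   try {
+ *     // ... do the work being measured ...
+ *   } finally {
+ *     context.stop();
+ *   }
+ * }
+ * </pre>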
+ */ +public class MetricsController extends Controller { + private static final Logger LOGGER = Logger.getLogger(MetricsController.class); + + private static final String METRICS_NOT_ENABLED = "Metrics not enabled"; + private static final String HEALTHCHECK_NOT_ENABLED = "Healthcheck not enabled"; + private static final String UNINITIALIZED_MESSAGE = "Metrics should be initialized before use."; + + private static MetricRegistry _metricRegistry = null; + private static HealthCheckRegistry _healthCheckRegistry = null; + + private static int _queueSize = -1; + private static int _retryQueueSize = -1; + private static Meter _skippedJobs; + private static Meter _processedJobs; + private static Histogram _jobProcessingTime; + + /** + * Initializer method for the metrics registry. Call this method before registering + * new metrics with the registry. + */ + public static void init() { + // Metrics registries will be initialized only if enabled + if(!Configuration.root().getBoolean("metrics", false)) { + LOGGER.debug("Metrics not enabled in the conf file."); + return; + } + + // Metrics & healthcheck registries will be initialized only once + if(_metricRegistry != null) { + LOGGER.debug("Metric registries already initialized."); + return; + } + + _metricRegistry = new MetricRegistry(); + + String className = AnalyticJob.class.getSimpleName(); + + _skippedJobs = _metricRegistry.meter(name(className, "skippedJobs", "count")); + _processedJobs = _metricRegistry.meter(name(className, "processedJobs", "count")); + _jobProcessingTime = _metricRegistry.histogram(name(className, "jobProcessingTime", "ms")); + _metricRegistry.register(name(className, "jobQueue", "size"), new Gauge() { + @Override + public Integer getValue() { + return _queueSize; + } + }); + _metricRegistry.register(name(className, "lastDayJobs", "count"), new Gauge() { + private static final long DAY = 24 * 60 * 60 * 1000; + private static final long UPDATE_DELAY = 60 * 1000; + + private long _lastUpdate = 0; + private int _count = -1; + + @Override + public Integer getValue() { + long now = System.currentTimeMillis(); + if (now - _lastUpdate > UPDATE_DELAY) { + _count = AppResult.find.where() + .gt(AppResult.TABLE.FINISH_TIME, now - DAY) + .findRowCount(); + _lastUpdate = now; + } + return _count; + } + }); + _metricRegistry.register(name(className, "retryQueue", "size"), new Gauge() { + @Override + public Integer getValue() { + return _retryQueueSize; + } + }); + _metricRegistry.registerAll(new CustomGarbageCollectorMetricSet()); + _metricRegistry.registerAll(new MemoryUsageGaugeSet()); + + JmxReporter.forRegistry(_metricRegistry).build().start(); + + _healthCheckRegistry = new HealthCheckRegistry(); + + _healthCheckRegistry.register("ThreadDeadlockHealthCheck", + new ThreadDeadlockHealthCheck()); + } + + /** + * + * @param name to be used while registering the timer. + * @return Returns Timer.Context if metrics is enabled + * and null otherwise. + */ + public static Timer.Context startTimer(String name) { + if(_metricRegistry != null) { + return _metricRegistry.timer(name).time(); + } else { + throw new NullPointerException(UNINITIALIZED_MESSAGE); + } + } + + /** + * + * @return The MetricRegistry if initialized. + */ + public static MetricRegistry getMetricRegistry() { + if (_metricRegistry != null) { + return _metricRegistry; + } else { + throw new NullPointerException(UNINITIALIZED_MESSAGE); + } + } + + /** + * Set the current job queue size in the metric registry. 
+ * @param size + */ + public static void setQueueSize(int size) { + _queueSize = size; + } + + /** + * Set the retry job queue size in the metric registry. + * @param retryQueueSize + */ + public static void setRetryQueueSize(int retryQueueSize) { + _retryQueueSize = retryQueueSize; + } + + /** + * Increments the meter for keeping track of processed jobs in metrics registry. + */ + public static void markProcessedJobs() { + if(_processedJobs != null) { + _processedJobs.mark(); + } + } + + /** + * Sets the time in milliseconds taken to process a job. + * @param processingTimeTaken + */ + public static void setJobProcessingTime(long processingTimeTaken) { + if(_jobProcessingTime != null) { + _jobProcessingTime.update(processingTimeTaken); + } + } + + /** + * A meter for marking skipped jobs. + * Jobs which doesn't have any data or which exceeds the set number of + * retries can be marked as skipped. + */ + public static void markSkippedJob() { + if(_skippedJobs != null) { + _skippedJobs.mark(); + } + } + + /** + * The endpoint /ping + * Ping will respond with the message 'alive' if the application is running. + * + * @return Will return 'alive' if Dr. Elephant is Up. + */ + public static Result ping() { + return ok(Json.toJson("alive")); + } + + /** + * The endpoint /metrics + * Endpoint can be queried if metrics is enabled. + * + * @return Will return all the metrics in Json format. + */ + public static Result index() { + if (_metricRegistry != null) { + return ok(Json.toJson(_metricRegistry)); + } else { + return ok(Json.toJson(METRICS_NOT_ENABLED)); + } + } + + /** + * The endpoint /healthcheck + * Endpoint can be queried if metrics is enabled. + * + * @return Will return all the healthcheck metrics in Json format. + */ + public static Result healthcheck() { + if (_healthCheckRegistry != null) { + return ok(Json.toJson(_healthCheckRegistry.runHealthChecks())); + } else { + return ok(Json.toJson(HEALTHCHECK_NOT_ENABLED)); + } + } +} diff --git a/app/controllers/PaginationStats.java b/app/controllers/PaginationStats.java new file mode 100644 index 000000000..02f04196a --- /dev/null +++ b/app/controllers/PaginationStats.java @@ -0,0 +1,118 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package controllers; + +/** + * This class handles the pagination of results in search page + */ +public class PaginationStats { + public int currentPage = 1; + public int paginationBarStartIndex = 1; + public int paginationBarEndIndex = 1; + public int pageLength; + public int pageBarLength; + public String queryString = null; + + /** + * The constructor for the PaginationStats + * + * @param pageLength The number of results per page + * @param pageBarLength The Length of the pagination bar at the bottom + */ + public PaginationStats(int pageLength, int pageBarLength) { + this.pageLength = pageLength; + this.pageBarLength = pageBarLength; + } + + /** + * Return the current page number + * @return page number + */ + public int getCurrentPage() { + return currentPage; + } + + /** + * Set the current page number + * @param currentPage The number to set + */ + public void setCurrentPage(int currentPage) { + if (currentPage < 1) { + this.currentPage = 1; + } else { + this.currentPage = currentPage; + } + } + + /** + * Computes the paginationBarStartIndex. It is computed such that the currentPage + * remains at the center of the Pagination Bar. + * + * @return The start Index of the Pagination bar + */ + public int getPaginationBarStartIndex() { + this.paginationBarStartIndex = Math.max(this.currentPage - this.pageBarLength / 2, 1); + return this.paginationBarStartIndex; + } + + /** + * Compute the Pagination Bar end index depending on the number of serach results + * to be displayed. + * + * @param resultSize The fetched result size + * @return The end index of the Pagination bar + */ + public int computePaginationBarEndIndex(int resultSize) { + this.paginationBarEndIndex = this.paginationBarStartIndex + (resultSize - 1) / this.pageLength; + return this.paginationBarEndIndex; + } + + /** + * Returns the pagination bar end Index + */ + public int getPaginationBarEndIndex() { + return this.paginationBarEndIndex; + } + + /** + * Returns the query string + */ + public String getQueryString() { + return queryString; + } + + /** + * Sets the query string + */ + public void setQueryString(String queryString) { + this.queryString = queryString; + } + + /** + * Returns the Pagination bar length + */ + public int getPageBarLength() { + return pageBarLength; + } + + /** + * Returns the number of results per page + */ + public int getPageLength() { + return pageLength; + } +} diff --git a/app/controllers/api/v1/.Web.java.swp b/app/controllers/api/v1/.Web.java.swp new file mode 100644 index 000000000..af7322bab Binary files /dev/null and b/app/controllers/api/v1/.Web.java.swp differ diff --git a/app/controllers/api/v1/JsonKeys.java b/app/controllers/api/v1/JsonKeys.java new file mode 100644 index 000000000..32df41534 --- /dev/null +++ b/app/controllers/api/v1/JsonKeys.java @@ -0,0 +1,95 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package controllers.api.v1; + +public class JsonKeys { + + // Common keys + public static final String USERNAME = "username"; + public static final String NAME = "name"; + public static final String SEVERITY = "severity"; + public static final String ID = "id"; + public static final String RESOURCE_USED = "resourceused"; + public static final String RESOURCE_WASTED = "resourcewasted"; + public static final String RUNTIME = "runtime"; + public static final String WAITTIME = "waittime"; + public static final String START_TIME = "starttime"; + public static final String FINISH_TIME = "finishtime"; + public static final String COUNT = "count"; + public static final String VALUE = "value"; + public static final String QUEUE = "queue"; + public static final String SEARCH_OPTS = "search-options"; + public static final String START = "start"; + public static final String END = "end"; + public static final String SEARCH_RESULTS = "search-results"; + public static final String JOB_TYPES = "jobtypes"; + public static final String HEURISTICS = "heuristics"; + public static final String SEVERITIES = "severities"; + public static final String JOB_CATEGORY = "jobcategory"; + public static final String TOTAL = "total"; + public static final String SUMMARIES = "summaries"; + public static final String SCHEDULER = "scheduler"; + public static final String CRITICAL = "critical"; + public static final String SEVERE = "severe"; + public static final String MODERATE = "moderate"; + public static final String LOW = "low"; + public static final String NONE = "none"; + public static final String DASHBOARD_SUMMARIES = "dashboard-summaries"; + public static final String USER_RESULTS = "user-results"; + public static final String USER_DETAILS = "user-details"; + public static final String EXCEPTION_STATUSES = "exception-statuses"; + public static final String EXCEPTION_STATUS = "exception-status"; + public static final String SCHEDULERS = "schedulers"; + public static final String EXCEPTION_ENABLED = "exceptionenabled"; + public static String EXCEPTION_SUMMARY = "exceptionSummary"; + public static String STATUS = "status"; + public static String TYPE = "type"; + public static String TASKS = "tasks"; + public static String WORKFLOW_EXCEPTIONS = "workflow-exceptions"; + + + // Workflows + public static final String WORKFLOW_SUMMARIES = "workflow-summaries"; + public static final String WORKFLOWS = "workflows"; + public static final String FLOW_DEF_ID = "flowdefid"; + public static final String FLOW_EXEC_ID = "flowexecid"; + public static final String TOTAL_WORKFLOWS = "totalworkflows"; + + // Jobs + public static final String JOB_SUMMARIES = "job-summaries"; + public static final String JOBS_SEVERITY = "jobsseverity"; + public static final String JOB_EXEC_ID = "jobexecid"; + public static final String JOB_DEF_ID = "jobdefid"; + public static final String JOB_TYPE = "jobtype"; + public static final String JOB_NAME = "jobname"; + public static final String JOBS = "jobs"; + public static final String JOBSSUMMARIES = "jobssummaries"; + public static final String TOTAL_JOBS = "totaljobs"; + + + // Tasks + public static final String APPLICATION_SUMMARIES = "application-summaries"; + public static final String HEURISTICS_SUMMARY = "heuristicsummary"; + public static final String TASKS_SEVERITY = "tasksseverity"; + public static final String TASKS_SUMMARIES = "taskssummaries"; + public static final String MAPREDUCE_JOB_NAME = "mapreducejobname"; + public static final String TRACKING_URL = "trackingurl"; + public static 
final String DETAILS = "details"; + public static final String YARN_APP_HEURISTIC_RESULTS = "yarnappheuristicresults"; + public static final String APPLICATIONS = "applications"; + public static final String TOTAL_APPLICATIONS = "totalapplications"; +} diff --git a/app/controllers/api/v1/Web.java b/app/controllers/api/v1/Web.java new file mode 100644 index 000000000..979db82aa --- /dev/null +++ b/app/controllers/api/v1/Web.java @@ -0,0 +1,2000 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package controllers.api.v1; + +import com.avaje.ebean.Query; +import com.avaje.ebean.Junction; +import com.avaje.ebean.ExpressionList; +import com.avaje.ebean.SqlRow; +import com.avaje.ebean.SqlQuery; +import com.avaje.ebean.Ebean; + +import com.google.common.collect.Lists; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.linkedin.drelephant.ElephantContext; +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.JobType; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.exceptions.ExceptionFinder; +import com.linkedin.drelephant.exceptions.HadoopException; +import com.linkedin.drelephant.security.HadoopSecurity; +import com.linkedin.drelephant.util.InfoExtractor; +import com.linkedin.drelephant.util.Utils; +import controllers.ControllerUtil; +import controllers.IdUrlPair; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Arrays; + +import javax.naming.AuthenticationException; +import models.AppHeuristicResult; +import models.AppHeuristicResultDetails; +import models.AppResult; +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; +import play.data.DynamicForm; +import play.data.Form; +import play.mvc.Controller; +import play.mvc.Result; +import controllers.Application; + + +/** + * The Web controller defines the rest interfaces for the Dr. Elephant User interface. 
+ */ +public class Web extends Controller { + + private static final Logger logger = Logger.getLogger(Web.class); + + private static final long DAY = 24 * 60 * 60 * 1000; + private static final long FETCH_DELAY = 60 * 1000; + + private static final int MAX_APPLICATIONS = 50; + private static final int MAX_APPLICATIONS_IN_WORKFLOW = 5000; + private static final int MAX_APPLICATIONS_IN_JOB = 5000; + private static final int MAX_FLOW_LIMIT = 25; + private static final int MAX_JOB_LIMIT = 25; + private static final int SEARCH_DEFAULT_PAGE_OFFSET = 0; + private static final int SEARCH_DEFAULT_PAGE_LIMIT = 25; + private static final int SEARCH_APPLICATION_MAX_OFFSET = 500; + + private static long _lastFetch = 0; + private static int _numJobsAnalyzed = 0; + private static int _numJobsCritical = 0; + private static int _numJobsSevere = 0; + private static int _numJobsModerate = 0; + private static int _numJobsLow = 0; + private static int _numJobsNone = 0; + + /** + * Returns the json object for the dashboard summaries of jobs analzyed in last day. + */ + public static Result restDashboardSummaries() { + + long now = System.currentTimeMillis(); + long finishDate = now - DAY; + + //Update statistics only after FETCH_DELAY + if (now - _lastFetch > FETCH_DELAY) { + _numJobsAnalyzed = AppResult.find.where() + .gt(AppResult.TABLE.FINISH_TIME, finishDate) + .findRowCount(); + _numJobsCritical = AppResult.find.where() + .gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.CRITICAL.getValue()) + .findRowCount(); + _numJobsSevere = AppResult.find.where() + .gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.SEVERE.getValue()) + .findRowCount(); + _numJobsModerate = AppResult.find.where().gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.MODERATE.getValue()) + .findRowCount(); + _numJobsLow = AppResult.find.where().gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.LOW.getValue()) + .findRowCount(); + _numJobsNone = AppResult.find.where().gt(AppResult.TABLE.FINISH_TIME, finishDate) + .eq(AppResult.TABLE.SEVERITY, Severity.NONE.getValue()) + .findRowCount(); + _lastFetch = now; + } + + JsonObject dashboard = new JsonObject(); + dashboard.addProperty(JsonKeys.ID, "dashboard"); + dashboard.addProperty(JsonKeys.TOTAL, _numJobsAnalyzed); + dashboard.addProperty(JsonKeys.CRITICAL, _numJobsCritical); + dashboard.addProperty(JsonKeys.SEVERE, _numJobsSevere); + dashboard.addProperty(JsonKeys.MODERATE, _numJobsModerate); + dashboard.addProperty(JsonKeys.LOW, _numJobsLow); + dashboard.addProperty(JsonKeys.NONE, _numJobsNone); + JsonObject parent = new JsonObject(); + parent.add(JsonKeys.DASHBOARD_SUMMARIES, dashboard); + + return ok(new Gson().toJson(parent)); + } + + /** + * Returns the list of AppResults for the given username limit by maxApplications + * @param username The username for which applications need to be fetched. 
+ * @param maxApplications The max number of applications that should be fetched + * @return The list of Applications that should for the given username limit by maxApplications + */ + private static List getApplications(String username, int maxApplications) { + List results = AppResult.find.select("*").where().eq(AppResult.TABLE.USERNAME, username).order() + .desc(AppResult.TABLE.FINISH_TIME).setMaxRows(maxApplications).findList(); + return results; + } + + /** + * Returns the list of AppResults limit by maxApplications + * @param maxApplications The max number of applications that should be fetched + * @return The list of Applications limit by maxApplications + */ + private static List getApplications(int maxApplications) { + List results = + AppResult.find.select("*").order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(maxApplications).findList(); + return results; + } + + /** + * Returns the list of AppResults scheduled by a scheduler for the given username limit by maxApplications. + * @param username The username for which applications need to be fetched. + * @param maxApplications The max number of applications that should be fetched + * @return The list of Applications scheduled by a scheduler that should be fetched for the given username limit by maxApplications + */ + private static List getSchedulerApplications(String username, int maxApplications) { + List results = + AppResult.find.select("*").where().eq(AppResult.TABLE.USERNAME, username).ne(AppResult.TABLE.FLOW_EXEC_ID, null) + .ne(AppResult.TABLE.FLOW_EXEC_ID, "").order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(maxApplications) + .findList(); + return results; + } + + /** + * Returns the list of AppResults scheduled by a scheduler limit by maxApplications + * @param maxApplications The max number of applications that should be fetched + * @return The list of Applications scheduled by a scheduler limit by maxApplications + */ + private static List getSchedulerApplications(int maxApplications) { + List results = + AppResult.find.select("*").where().ne(AppResult.TABLE.FLOW_EXEC_ID, null).ne(AppResult.TABLE.FLOW_EXEC_ID, "") + .order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(maxApplications).findList(); + return results; + } + + /** + * Returns a list of AppResult with the the given flowExecId + * @param flowExecId The flow execution id of the flow + * @return The list of AppResult filtered by flow execution id + */ + private static List getRestFlowResultsFromFlowExecutionId(String flowExecId) { + List results = AppResult.find.select("*").where().eq(AppResult.TABLE.FLOW_EXEC_ID, flowExecId).order() + .desc(AppResult.TABLE.FINISH_TIME).findList(); + return results; + } + + ; + + /** + * Returns a list of AppResult with the given jobExecId + * @param jobExecId The job execution id of the job + * @return The list of AppResult filtered by job execution id + */ + private static List getRestJobResultsFromJobExecutionId(String jobExecId) { + List results = + AppResult.find.select(AppResult.getSearchFields()).where().eq(AppResult.TABLE.JOB_EXEC_ID, jobExecId).order() + .desc(AppResult.TABLE.FINISH_TIME) + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()).findList(); + return results; + } + + /** + * Returns the AppResult with the given applicationId + * @param applicationId The application id of the application + * @return The AppResult for the given application Id + */ + private static AppResult getAppResultFromApplicationId(String applicationId) { + AppResult result = 
AppResult.find.select("*").fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*") + .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*") + .where().idEq(applicationId).order().desc(AppResult.TABLE.FINISH_TIME).findUnique(); + return result; + } + + /** + * This method returns the json object for the application-summaries based on the username + * @param username The username for which application-summaries json must be returned + * @return The application-summaries json for the given username + * response object: + *
+   *{
+   *  "application-summaries": [
+   *  {
+   *    "id": "sample_app_0000000001",
+   *      "username": "user",
+   *      "starttime": 1471910835628,
+   *      "finishtime": 1471911099238,
+   *      "runtime": 263610,
+   *      "waittime": 46234,
+   *      "resourceused": 101382144,
+   *      "resourcewasted": 15993417,
+   *      "severity": "Moderate",
+   *      "heuristicsummary": [
+   *    {
+   *      "name": "Mapper Data Skew",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Mapper GC",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Mapper Time",
+   *        "severity": "Moderate"
+   *    },
+   *    {
+   *      "name": "Mapper Speed",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Mapper Spill",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Mapper Memory",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Reducer Data Skew",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Reducer GC",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Reducer Time",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Reducer Memory",
+   *        "severity": "None"
+   *    },
+   *    {
+   *      "name": "Shuffle & Sort",
+   *        "severity": "Low"
+   *    }
+   *    ]
+   *  }
+   *  ]
+   *}
+   * </pre>
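+   *
+   * Illustrative request (the URL pattern is an assumption here; the authoritative
+   * mapping lives in conf/routes):
+   * <pre>
+   *   GET /rest/applicationsummaries?username=user
+   * </pre>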
+   */
+  public static Result restApplicationSummariesForUser(String username) {
+    JsonArray applicationSummaryArray = new JsonArray();
+
+    List<AppResult> results = null;
+    if (username == null || username.isEmpty()) {
+      results = getApplications(MAX_APPLICATIONS);
+    } else {
+      results = getApplications(username, MAX_APPLICATIONS);
+    }
+
+    for (AppResult application : results) {
+      JsonObject applicationObject = new JsonObject();
+      JsonArray heuristicsArray = new JsonArray();
+      List<AppHeuristicResult> appHeuristicResult = application.yarnAppHeuristicResults;
+
+      for (AppHeuristicResult heuristic : appHeuristicResult) {
+        JsonObject heuristicObject = new JsonObject();
+        heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
+        heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
+        heuristicsArray.add(heuristicObject);
+      }
+
+      applicationObject.addProperty(JsonKeys.ID, application.id);
+      applicationObject.addProperty(JsonKeys.USERNAME, application.username);
+      applicationObject.addProperty(JsonKeys.JOB_NAME, application.jobName);
+      applicationObject.addProperty(JsonKeys.JOB_TYPE, application.jobType);
+      applicationObject.addProperty(JsonKeys.START_TIME, application.startTime);
+      applicationObject.addProperty(JsonKeys.FINISH_TIME, application.finishTime);
+      applicationObject.addProperty(JsonKeys.RUNTIME, application.finishTime - application.startTime);
+      applicationObject.addProperty(JsonKeys.WAITTIME, application.totalDelay);
+      applicationObject.addProperty(JsonKeys.RESOURCE_USED, application.resourceUsed);
+      applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, application.resourceWasted);
+      applicationObject.addProperty(JsonKeys.QUEUE, application.queueName);
+      applicationObject.addProperty(JsonKeys.SEVERITY, application.severity.getText());
+
+      applicationObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
+
+      applicationSummaryArray.add(applicationObject);
+    }
+
+    JsonArray sortedApplicationSummaryArray = getSortedJsonArrayByFinishTime(applicationSummaryArray);
+
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.APPLICATION_SUMMARIES, sortedApplicationSummaryArray);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * This method returns the json object for job-summaries for the given user
+   * @param username The given username for which job-summaries json object should be returned
+   * @return The job-summaries json object for the given username
+   * response object:
+   * <pre>
+   *{
+   *  "job-summaries": [
+   *  {
+   *    "id": "job-exec-id",
+   *      "jobname": "jobname",
+   *      "jobtype": "Pig",
+   *      "username": "username",
+   *      "starttime": 1471910835628,
+   *      "finishtime": 1471911099238,
+   *      "runtime": 263610,
+   *      "waittime": 46234,
+   *      "resourceused": 101382144,
+   *      "resourcewasted": 15993417,
+   *      "severity": "Moderate",
+   *      "scheduler": "azkaban",
+   *      "tasksseverity": [
+   *    {
+   *      "severity": "Moderate",
+   *        "count": 1
+   *    }
+   *    ]
+   *  }
+   *  ]
+   *}
+   * </pre>
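+   *
+   * Illustrative request (path assumed; see conf/routes for the actual binding):
+   * <pre>
+   *   GET /rest/jobsummaries?username=username
+   * </pre>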
+   */
+  public static Result restJobSummariesForUser(String username) {
+
+    JsonArray jobSummaryArray = new JsonArray();
+
+    List<AppResult> results = null;
+    if (username == null || username.isEmpty()) {
+      results = getSchedulerApplications(MAX_APPLICATIONS_IN_WORKFLOW);
+    } else {
+      results = getSchedulerApplications(username, MAX_APPLICATIONS_IN_WORKFLOW);
+    }
+
+    Map<IdUrlPair, List<AppResult>> jobExecIdToJobsMap = ControllerUtil
+        .limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.JOB_EXECUTION_ID), results.size(),
+            MAX_JOB_LIMIT);
+
+    for (IdUrlPair jobDefPair : jobExecIdToJobsMap.keySet()) {
+      long totalJobMemoryUsed = 0L;
+      long totalJobMemoryWasted = 0L;
+      long totalJobDelay = 0L;
+      long totalJobRuntime = 0L;
+      long jobStartTime = Long.MAX_VALUE;
+      long jobEndTime = 0;
+      Severity jobSeverity = Severity.NONE;
+      String jobType = null;
+      String jobId = jobDefPair.getId();
+      String jobName = "";
+      String user = null;
+      String queueName = "";
+      String scheduler = "";
+      String jobDefId = "";
+      String jobExecId = "";
+
+      Map<Severity, Long> applicationSeverityCount = new HashMap<Severity, Long>();
+
+      for (AppResult application : jobExecIdToJobsMap.get(jobDefPair)) {
+
+        totalJobMemoryUsed += application.resourceUsed;
+        totalJobMemoryWasted += application.resourceWasted;
+
+        jobType = application.jobType;
+        jobName = application.jobName;
+        jobDefId = application.jobDefId;
+        jobExecId = application.jobExecId;
+
+        queueName = application.queueName;
+        scheduler = application.scheduler;
+
+        if (application.startTime < jobStartTime) {
+          jobStartTime = application.startTime;
+        }
+
+        if (application.finishTime > jobEndTime) {
+          jobEndTime = application.finishTime;
+        }
+
+        if (application.severity.getValue() > jobSeverity.getValue()) {
+          jobSeverity = application.severity;
+        }
+
+        if (applicationSeverityCount.containsKey(application.severity)) {
+          applicationSeverityCount.put(application.severity, applicationSeverityCount.get(application.severity) + 1L);
+        } else {
+          applicationSeverityCount.put(application.severity, 1L);
+        }
+
+        user = application.username;
+      }
+
+      JsonArray applicationSeverity = new JsonArray();
+      List<Severity> keys = getSortedSeverityKeys(applicationSeverityCount.keySet());
+      for (Severity key : keys) {
+        JsonObject severityObject = new JsonObject();
+        severityObject.addProperty(JsonKeys.SEVERITY, key.getText());
+        severityObject.addProperty(JsonKeys.COUNT, applicationSeverityCount.get(key));
+        applicationSeverity.add(severityObject);
+      }
+
+      totalJobDelay = Utils.getTotalWaittime(jobExecIdToJobsMap.get(jobDefPair));
+      totalJobRuntime = Utils.getTotalRuntime(jobExecIdToJobsMap.get(jobDefPair));
+
+      JsonObject jobObject = new JsonObject();
+      jobObject.addProperty(JsonKeys.ID, jobId);
+      jobObject.addProperty(JsonKeys.JOB_NAME, jobName);
+      jobObject.addProperty(JsonKeys.JOB_TYPE, jobType);
+      jobObject.addProperty(JsonKeys.USERNAME, user);
+      jobObject.addProperty(JsonKeys.START_TIME, jobStartTime);
+      jobObject.addProperty(JsonKeys.FINISH_TIME, jobEndTime);
+      jobObject.addProperty(JsonKeys.RUNTIME, totalJobRuntime);
+      jobObject.addProperty(JsonKeys.WAITTIME, totalJobDelay);
+      jobObject.addProperty(JsonKeys.RESOURCE_USED, totalJobMemoryUsed);
+      jobObject.addProperty(JsonKeys.RESOURCE_WASTED, totalJobMemoryWasted);
+      jobObject.addProperty(JsonKeys.QUEUE, queueName);
+      jobObject.addProperty(JsonKeys.SCHEDULER, scheduler);
+      jobObject.addProperty(JsonKeys.SEVERITY, jobSeverity.getText());
+      jobObject.addProperty(JsonKeys.JOB_DEF_ID, jobDefId);
+      jobObject.addProperty(JsonKeys.JOB_EXEC_ID, jobExecId);
+
+      jobObject.add(JsonKeys.TASKS_SEVERITY, applicationSeverity);
+
+      jobSummaryArray.add(jobObject);
+    }
+
+    JsonArray sortedJobSummaryArray = getSortedJsonArrayByFinishTime(jobSummaryArray);
+
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.JOB_SUMMARIES, sortedJobSummaryArray);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * This method returns the workflow-summaries json response
+   * @param username The username for which workflow-summaries must be returned
+   * @return The json response of the workflow-summaries for the given user
+   * Response data:
+   * <pre>
+   *{
+   *  "workflow-summaries": [
+   *  {
+   *    "id": "http://workflow-id",
+   *      "username": "search",
+   *      "starttime": 1468818098875,
+   *      "finishtime": 1468819946683,
+   *      "runtime": 1855532,
+   *      "waittime": 365368,
+   *      "resourceused": 3306438656,
+   *      "resourcewasted": 516978829,
+   *      "severity": "Severe",
+   *      "jobsseverity": [
+   *    {
+   *      "severity": "Severe",
+   *        "count": 26
+   *    },
+   *    {
+   *      "severity": "Moderate",
+   *        "count": 3
+   *    },
+   *    {
+   *      "severity": "Low",
+   *        "count": 1
+   *    },
+   *    {
+   *      "severity": "None",
+   *        "count": 16
+   *    }
+   *    ]
+   *  }
+   *  ]
+   *}
+   * </pre>
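+   *
+   * Illustrative request (path assumed; see conf/routes):
+   * <pre>
+   *   GET /rest/workflowsummaries?username=search
+   * </pre>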
+   */
+  public static Result restWorkflowSummariesForUser(String username) {
+    JsonArray workflowSummaryArray = new JsonArray();
+    List<AppResult> results = null;
+    if (username == null || username.isEmpty()) {
+      results = getSchedulerApplications(MAX_APPLICATIONS_IN_WORKFLOW);
+    } else {
+      results = getSchedulerApplications(username, MAX_APPLICATIONS_IN_WORKFLOW);
+    }
+
+    Map<IdUrlPair, List<AppResult>> flowExecIdToJobsMap = ControllerUtil
+        .limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID),
+            results.size(), MAX_FLOW_LIMIT);
+
+    List<IdUrlPair> keyList = new ArrayList<IdUrlPair>(flowExecIdToJobsMap.keySet());
+
+    for (IdUrlPair flowExecPair : keyList) {
+
+      List<AppResult> mrJobsList = Lists.reverse(flowExecIdToJobsMap.get(flowExecPair));
+
+      Map<IdUrlPair, List<AppResult>> jobDefIdToJobsMap =
+          ControllerUtil.groupJobs(mrJobsList, ControllerUtil.GroupBy.JOB_EXECUTION_ID);
+
+      Map<Severity, Long> jobSeverityCount = new HashMap<Severity, Long>();
+      long totalFlowMemoryUsed = 0;
+      long totalFlowMemoryWasted = 0;
+      long totalFlowDelay = 0;
+      long totalFlowRuntime = 0;
+      Severity flowSeverity = Severity.NONE;
+
+      for (IdUrlPair jobDefPair : jobDefIdToJobsMap.keySet()) {
+
+        Severity jobseverity = Severity.NONE;
+        long totalJobMemoryUsed = 0;
+        long totalJobMemoryWasted = 0;
+
+        for (AppResult job : jobDefIdToJobsMap.get(jobDefPair)) {
+
+          totalJobMemoryUsed += job.resourceUsed;
+          totalJobMemoryWasted += job.resourceWasted;
+
+          if (job.severity.getValue() > jobseverity.getValue()) {
+            jobseverity = job.severity;
+          }
+        }
+
+        if (jobSeverityCount.containsKey(jobseverity)) {
+          jobSeverityCount.put(jobseverity, jobSeverityCount.get(jobseverity) + 1L);
+        } else {
+          jobSeverityCount.put(jobseverity, 1L);
+        }
+
+        if (jobseverity.getValue() > flowSeverity.getValue()) {
+          flowSeverity = jobseverity;
+        }
+
+        totalFlowMemoryUsed += totalJobMemoryUsed;
+        totalFlowMemoryWasted += totalJobMemoryWasted;
+      }
+
+      totalFlowDelay = Utils.getTotalWaittime(flowExecIdToJobsMap.get(flowExecPair));
+      totalFlowRuntime = Utils.getTotalRuntime(flowExecIdToJobsMap.get(flowExecPair));
+
+      JsonArray jobSeverity = new JsonArray();
+      List<Severity> keys = getSortedSeverityKeys(jobSeverityCount.keySet());
+      for (Severity key : keys) {
+        JsonObject severityObject = new JsonObject();
+        severityObject.addProperty(JsonKeys.SEVERITY, key.getText());
+        severityObject.addProperty(JsonKeys.COUNT, jobSeverityCount.get(key));
+        jobSeverity.add(severityObject);
+      }
+
+      // Execution record
+      JsonObject dataset = new JsonObject();
+      dataset.addProperty(JsonKeys.ID, mrJobsList.get(0).flowExecId);
+      dataset.addProperty(JsonKeys.USERNAME, mrJobsList.get(0).username);
+      dataset.addProperty(JsonKeys.START_TIME, mrJobsList.get(0).startTime);
+      dataset.addProperty(JsonKeys.FINISH_TIME, mrJobsList.get(mrJobsList.size() - 1).finishTime);
+      dataset.addProperty(JsonKeys.RUNTIME, totalFlowRuntime);
+      dataset.addProperty(JsonKeys.WAITTIME, totalFlowDelay);
+      dataset.addProperty(JsonKeys.RESOURCE_USED, totalFlowMemoryUsed);
+      dataset.addProperty(JsonKeys.RESOURCE_WASTED, totalFlowMemoryWasted);
+      dataset.addProperty(JsonKeys.QUEUE, mrJobsList.get(0).queueName);
+      dataset.addProperty(JsonKeys.SEVERITY, flowSeverity.getText());
+      dataset.addProperty(JsonKeys.SCHEDULER, mrJobsList.get(0).scheduler);
+      dataset.addProperty(JsonKeys.FLOW_EXEC_ID, mrJobsList.get(0).flowExecId);
+      dataset.addProperty(JsonKeys.FLOW_DEF_ID, mrJobsList.get(0).flowDefId);
+      dataset.add(JsonKeys.JOBS_SEVERITY, jobSeverity);
+      workflowSummaryArray.add(dataset);
+    }
+    JsonArray sortedWorkflowSummaryArray = getSortedJsonArrayByFinishTime(workflowSummaryArray);
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.WORKFLOW_SUMMARIES, sortedWorkflowSummaryArray);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * This method returns the workflow response object based on the flow execution id
+   * @param flowId The flow execution id for which the flow should be returned
+   * @return Return the workflow detail based on the flow execution id
+   *
+   * response object:
+   * <pre>
+   * *{
+   *    "workflows": {
+   *    "id": "flowid",
+   *        "username": "username",
+   *        "starttime": 1471910835628,
+   *        "finishtime": 1471911099238,
+   *        "runtime": 263610,
+   *        "waittime": 46234,
+   *        "resourceused": 101382144,
+   *        "resourcewasted": 15993417,
+   *        "severity": "Moderate",
+   *        "flowexecid": "flowexecid",
+   *        "flowdefid": "flowdefid",
+   *        "jobssummaries": [
+   *          {
+   *            "id": "jobid",
+   *            "jobname": "jobname",
+   *            "jobtype": "Pig",
+   *            "username": "username",
+   *            "starttime": 1471910835628,
+   *            "finishtime": 1471911099238,
+   *            "runtime": 263610,
+   *            "waittime": 46234,
+   *            "resourceused": 101382144,
+   *            "resourcewasted": 15993417,
+   *            "severity": "Moderate",
+   *            "tasksseverity": [
+   *              {
+   *                "severity": "Moderate",
+   *                "count": 1
+   *              }
+   *             ]
+   *        }
+   *      ],
+   *        "jobsseverity": [
+   *          {
+   *            "severity": "Moderate",
+   *            "count": 1
+   *          }
+   *      ]
+   *  }
+   *}
+   * </pre>
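+   *
+   * Illustrative request (path and parameter name are assumptions; see conf/routes):
+   * <pre>
+   *   GET /rest/workflows?workflowid=flowexecid
+   * </pre>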
+   */
+  public static Result restWorkflowFromFlowId(String flowId) {
+
+    if (flowId == null || flowId.isEmpty()) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.WORKFLOWS, new JsonObject());
+      return notFound(new Gson().toJson(parent));
+    }
+
+    JsonArray jobSeverityArray = new JsonArray();
+    JsonArray jobSummaryArray = new JsonArray();
+    JsonObject data = new JsonObject();
+
+    String flowExecId = flowId;
+    String username = "";
+    long totalFlowResourceUsed = 0;
+    long totalFlowResourceWasted = 0;
+    long totalFlowRuntime = 0;
+    long totalFlowDelay = 0;
+    Severity flowSeverity = Severity.NONE;
+    long flowStartTime = Long.MAX_VALUE;
+    long flowEndTime = 0;
+    String flowDefinitionId = "";
+    Map<Severity, Long> jobSeverityCount = new HashMap<Severity, Long>();
+    String wfQueueName = "";
+    String wfSchedulerName = "";
+
+    List<AppResult> results = getRestFlowResultsFromFlowExecutionId(flowId);
+
+    if (results.isEmpty()) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.WORKFLOWS, data);
+      return notFound(new Gson().toJson(parent));
+    }
+
+    Map<IdUrlPair, List<AppResult>> jobExecIdToJobsMap =
+        ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.JOB_EXECUTION_ID);
+
+    for (IdUrlPair jobDefPair : jobExecIdToJobsMap.keySet()) {
+      long totalJobMemoryUsed = 0;
+      long totalJobMemoryWasted = 0;
+      long totalJobDelay = 0;
+      long totalJobRuntime = 0;
+      long jobStartTime = Long.MAX_VALUE;
+      long jobEndTime = 0;
+      Severity jobSeverity = Severity.NONE;
+      String jobType = null;
+      String jobId = jobDefPair.getId();
+      String jobName = "";
+      String queueName = "";
+      String schedulerName = "";
+
+      Map<Severity, Long> taskSeverityCount = new HashMap<Severity, Long>();
+
+      for (AppResult task : jobExecIdToJobsMap.get(jobDefPair)) {
+        totalJobMemoryUsed += task.resourceUsed;
+        totalJobMemoryWasted += task.resourceWasted;
+        username = task.username;
+        jobType = task.jobType;
+        jobName = task.jobName;
+        flowDefinitionId = task.flowDefId;
+        queueName = task.queueName;
+        schedulerName = task.scheduler;
+
+        if (task.startTime < jobStartTime) {
+          jobStartTime = task.startTime;
+        }
+
+        if (task.finishTime > jobEndTime) {
+          jobEndTime = task.finishTime;
+        }
+        if (task.severity.getValue() > jobSeverity.getValue()) {
+          jobSeverity = task.severity;
+        }
+
+        if (taskSeverityCount.containsKey(task.severity)) {
+          taskSeverityCount.put(task.severity, taskSeverityCount.get(task.severity) + 1L);
+        } else {
+          taskSeverityCount.put(task.severity, 1L);
+        }
+      } // task scope ends here
+
+      if (jobSeverityCount.containsKey(jobSeverity)) {
+        jobSeverityCount.put(jobSeverity, jobSeverityCount.get(jobSeverity) + 1L);
+      } else {
+        jobSeverityCount.put(jobSeverity, 1L);
+      }
+
+      JsonArray taskSeverity = new JsonArray();
+      List<Severity> keys = getSortedSeverityKeys(taskSeverityCount.keySet());
+      for (Severity key : keys) {
+        JsonObject severityObject = new JsonObject();
+        severityObject.addProperty(JsonKeys.SEVERITY, key.getText());
+        severityObject.addProperty(JsonKeys.COUNT, taskSeverityCount.get(key));
+        taskSeverity.add(severityObject);
+      }
+
+      wfQueueName = queueName;
+      wfSchedulerName = schedulerName;
+      totalJobDelay = Utils.getTotalWaittime(jobExecIdToJobsMap.get(jobDefPair));
+      totalJobRuntime = Utils.getTotalRuntime(jobExecIdToJobsMap.get(jobDefPair));
+
+      JsonObject jobObject = new JsonObject();
+      jobObject.addProperty(JsonKeys.ID, jobId);
+      jobObject.addProperty(JsonKeys.JOB_NAME, jobName);
+      jobObject.addProperty(JsonKeys.JOB_TYPE, jobType);
+      jobObject.addProperty(JsonKeys.USERNAME, username);
+      jobObject.addProperty(JsonKeys.START_TIME, jobStartTime);
+      jobObject.addProperty(JsonKeys.FINISH_TIME, jobEndTime);
+      jobObject.addProperty(JsonKeys.RUNTIME, totalJobRuntime);
+      jobObject.addProperty(JsonKeys.WAITTIME, totalJobDelay);
+      jobObject.addProperty(JsonKeys.RESOURCE_USED, totalJobMemoryUsed);
+      jobObject.addProperty(JsonKeys.RESOURCE_WASTED, totalJobMemoryWasted);
+      jobObject.addProperty(JsonKeys.QUEUE, queueName);
+      jobObject.addProperty(JsonKeys.SCHEDULER, schedulerName);
+      jobObject.addProperty(JsonKeys.SEVERITY, jobSeverity.getText());
+      jobObject.add(JsonKeys.TASKS_SEVERITY, taskSeverity);
+
+      jobSummaryArray.add(jobObject);
+
+      totalFlowResourceUsed += totalJobMemoryUsed;
+      totalFlowResourceWasted += totalJobMemoryWasted;
+      if (jobSeverity.getValue() > flowSeverity.getValue()) {
+        flowSeverity = jobSeverity;
+      }
+
+      if (flowStartTime > jobStartTime) {
+        flowStartTime = jobStartTime;
+      }
+
+      if (flowEndTime < jobEndTime) {
+        flowEndTime = jobEndTime;
+      }
+    } // job map scope ends here
+
+    List<Severity> keys = getSortedSeverityKeys(jobSeverityCount.keySet());
+    for (Severity key : keys) {
+      JsonObject severityObject = new JsonObject();
+      severityObject.addProperty(JsonKeys.SEVERITY, key.getText());
+      severityObject.addProperty(JsonKeys.COUNT, jobSeverityCount.get(key));
+      jobSeverityArray.add(severityObject);
+    }
+
+    totalFlowDelay = Utils.getTotalWaittime(results);
+    totalFlowRuntime = Utils.getTotalRuntime(results);
+    data.addProperty(JsonKeys.ID, flowExecId);
+    data.addProperty(JsonKeys.USERNAME, username);
+    data.addProperty(JsonKeys.START_TIME, flowStartTime);
+    data.addProperty(JsonKeys.FINISH_TIME, flowEndTime);
+    data.addProperty(JsonKeys.RUNTIME, totalFlowRuntime);
+    data.addProperty(JsonKeys.WAITTIME, totalFlowDelay);
+    data.addProperty(JsonKeys.RESOURCE_USED, totalFlowResourceUsed);
+    data.addProperty(JsonKeys.RESOURCE_WASTED, totalFlowResourceWasted);
+    data.addProperty(JsonKeys.SEVERITY, flowSeverity.getText());
+    data.addProperty(JsonKeys.FLOW_EXEC_ID, flowExecId);
+    data.addProperty(JsonKeys.FLOW_DEF_ID, flowDefinitionId);
+    data.addProperty(JsonKeys.QUEUE, wfQueueName);
+    data.addProperty(JsonKeys.SCHEDULER, wfSchedulerName);
+    data.add(JsonKeys.JOBSSUMMARIES, jobSummaryArray);
+    data.add(JsonKeys.JOBS_SEVERITY, jobSeverityArray);
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.WORKFLOWS, data);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * This method returns the job response object based on the job execution id
+   * @param jobId The job execution id for which the job details should be returned
+   * @return The job details json for the given job execution id
+   * response object:
+   * <pre>
+   **{
+   *  "jobs": {
+   *    "id": "jobid",
+   *    "username": "username",
+   *    "jobname": "jobname",
+   *    "jobtype": "Pig",
+   *    "starttime": 1471910835628,
+   *    "finishtime": 1471911099238,
+   *    "runtime": 263610,
+   *    "waittime": 46234,
+   *    "resourceused": 101382144,
+   *    "resourcewasted": 15993417,
+   *    "severity": "Moderate",
+   *    "jobexecid": "jobexecid",
+   *    "jobdefid": "jobdefid",
+   *    "flowexecid": "flowexecid",
+   *    "flowdefid": "flowdefid",
+   *    "taskssummaries": [
+   *      {
+   *        "id": "application_id",
+   *        "username": "username",
+   *        "starttime": 1471910835628,
+   *        "finishtime": 1471911099238,
+   *        "runtime": 263610,
+   *        "waittime": 46234,
+   *        "resourceused": 101382144,
+   *        "resourcewasted": 15993417,
+   *        "severity": "Moderate",
+   *        "heuristicsummary": [
+   *          {
+   *            "name": "Mapper Data Skew",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Mapper GC",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Mapper Time",
+   *            "severity": "Moderate"
+   *          },
+   *          {
+   *            "name": "Mapper Speed",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Mapper Spill",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Mapper Memory",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Reducer Data Skew",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Reducer GC",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Reducer Time",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Reducer Memory",
+   *            "severity": "None"
+   *          },
+   *          {
+   *            "name": "Shuffle & Sort",
+   *            "severity": "Low"
+   *          }
+   *        ]
+   *      }
+   *    ],
+   *    "tasksseverity": [
+   *      {
+   *        "severity": "Moderate",
+   *        "count": 1
+   *      }
+   *    ]
+   *  }
+   *}
+   *
+   * </pre>
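+   *
+   * Illustrative request (path and parameter name are assumptions; the id must be
+   * a job execution id, which is how the results are looked up below):
+   * <pre>
+   *   GET /rest/jobs?jobid=jobexecid
+   * </pre>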
+   */
+  public static Result restJobFromJobId(String jobid) {
+
+    if (jobid == null || jobid.isEmpty()) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.JOBS, new JsonObject());
+      return notFound(new Gson().toJson(parent));
+    }
+
+    JsonArray taskSummaryArray = new JsonArray();
+
+    String jobDefID = jobid;
+    long jobResourceUsed = 0;
+    long jobResourceWasted = 0;
+    long jobRuntime = 0;
+    long jobDelay = 0;
+    Severity jobSeverity = Severity.NONE;
+    long jobStartTime = Long.MAX_VALUE;
+    long jobEndTime = 0;
+    String username = "";
+    String jobtype = "";
+    String jobExecutionId = "";
+    String jobDefinitionId = "";
+    String flowExecutionId = "";
+    String flowDefinitionId = "";
+    String jobname = "";
+    String queueName = "";
+    String scheduler = "";
+
+    List<AppResult> results = getRestJobResultsFromJobExecutionId(jobid);
+
+    if (results.isEmpty()) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.JOBS, new JsonObject());
+      return notFound(new Gson().toJson(parent));
+    }
+
+    Map<Severity, Long> taskSeverityCount = new HashMap<Severity, Long>();
+
+    for (AppResult task : results) {
+      username = task.username;
+      jobtype = task.jobType;
+      jobname = task.jobName;
+      jobExecutionId = task.jobExecId;
+      jobDefinitionId = task.jobDefId;
+      flowExecutionId = task.flowExecId;
+      flowDefinitionId = task.flowDefId;
+      queueName = task.queueName;
+      scheduler = task.scheduler;
+
+      JsonObject taskObject = new JsonObject();
+      JsonArray heuristicsArray = new JsonArray();
+      List<AppHeuristicResult> appHeuristicResult = task.yarnAppHeuristicResults;
+      for (AppHeuristicResult heuristic : appHeuristicResult) {
+        JsonObject heuristicObject = new JsonObject();
+        heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
+        heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
+        heuristicsArray.add(heuristicObject);
+      }
+
+      if (task.severity.getValue() > jobSeverity.getValue()) {
+        jobSeverity = task.severity;
+      }
+
+      if (taskSeverityCount.containsKey(task.severity)) {
+        taskSeverityCount.put(task.severity, taskSeverityCount.get(task.severity) + 1L);
+      } else {
+        taskSeverityCount.put(task.severity, 1L);
+      }
+
+      taskObject.addProperty(JsonKeys.ID, task.id);
+      taskObject.addProperty(JsonKeys.USERNAME, task.username);
+      taskObject.addProperty(JsonKeys.START_TIME, task.startTime);
+      taskObject.addProperty(JsonKeys.FINISH_TIME, task.finishTime);
+      taskObject.addProperty(JsonKeys.RUNTIME, task.finishTime - task.startTime);
+      taskObject.addProperty(JsonKeys.WAITTIME, task.totalDelay);
+      taskObject.addProperty(JsonKeys.RESOURCE_USED, task.resourceUsed);
+      taskObject.addProperty(JsonKeys.RESOURCE_WASTED, task.resourceWasted);
+      taskObject.addProperty(JsonKeys.SEVERITY, task.severity.getText());
+      taskObject.addProperty(JsonKeys.QUEUE, task.queueName);
+      taskObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
+      taskSummaryArray.add(taskObject);
+
+      jobResourceUsed += task.resourceUsed;
+      jobResourceWasted += task.resourceWasted;
+
+      if (jobStartTime > task.startTime) {
+        jobStartTime = task.startTime;
+      }
+
+      if (jobEndTime < task.finishTime) {
+        jobEndTime = task.finishTime;
+      }
+    }
+
+    JsonArray taskSeverity = new JsonArray();
+    List<Severity> keys = getSortedSeverityKeys(taskSeverityCount.keySet());
+    for (Severity key : keys) {
+      JsonObject severityObject = new JsonObject();
+      severityObject.addProperty(JsonKeys.SEVERITY, key.getText());
+      severityObject.addProperty(JsonKeys.COUNT, taskSeverityCount.get(key));
+      taskSeverity.add(severityObject);
+    }
+
+    jobRuntime = Utils.getTotalRuntime(results);
+    jobDelay = Utils.getTotalWaittime(results);
+    JsonObject data = new JsonObject();
+    data.addProperty(JsonKeys.ID, jobDefID);
+    data.addProperty(JsonKeys.USERNAME, username);
+    data.addProperty(JsonKeys.JOB_NAME, jobname);
+    data.addProperty(JsonKeys.JOB_TYPE, jobtype);
+    data.addProperty(JsonKeys.START_TIME, jobStartTime);
+    data.addProperty(JsonKeys.FINISH_TIME, jobEndTime);
+    data.addProperty(JsonKeys.RUNTIME, jobRuntime);
+    data.addProperty(JsonKeys.WAITTIME, jobDelay);
+    data.addProperty(JsonKeys.RESOURCE_USED, jobResourceUsed);
+    data.addProperty(JsonKeys.RESOURCE_WASTED, jobResourceWasted);
+    data.addProperty(JsonKeys.SEVERITY, jobSeverity.getText());
+    data.addProperty(JsonKeys.JOB_EXEC_ID, jobExecutionId);
+    data.addProperty(JsonKeys.JOB_DEF_ID, jobDefinitionId);
+    data.addProperty(JsonKeys.FLOW_EXEC_ID, flowExecutionId);
+    data.addProperty(JsonKeys.FLOW_DEF_ID, flowDefinitionId);
+    data.addProperty(JsonKeys.QUEUE, queueName);
+    data.addProperty(JsonKeys.SCHEDULER, scheduler);
+    data.add(JsonKeys.TASKS_SUMMARIES, taskSummaryArray);
+    data.add(JsonKeys.TASKS_SEVERITY, taskSeverity);
+
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.JOBS, data);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * This method returns the application response object based on the application id
+   * @param applicationId The application id for which the application details should be returned
+   * @return The application details json for the given application id
+   * response object:
+   * <pre>
+   *  {
+   *  "applications": {
+   *    "id": "application_id",
+   *    "username": "username",
+   *    "jobtype": "Pig",
+   *    "mapreducejobname": "mapreducejobname",
+   *    "starttime": 1471910835628,
+   *    "finishtime": 1471911099238,
+   *    "runtime": 263610,
+   *    "waittime": 46234,
+   *    "resourceused": 101382144,
+   *    "resourcewasted": 15993417,
+   *    "severity": "Moderate",
+   *    "trackingurl": "jobtracker_address",
+   *    "jobexecid": "jobexecutionid",
+   *    "jobdefid": "jobdefinitionid",
+   *    "flowexeid": "flowexecutionid",
+   *    "flowdefid": "flowdefinitionid",
+   *    "yarnappheuristicresults": [
+   *      {
+   *        "name": "Mapper Data Skew",
+   *        "severity": "None",
+   *        "details": [
+   *          {
+   *            "name": "Group A",
+   *            "value": "236 tasks @ 506 MB avg"
+   *          },
+   *          {
+   *            "name": "Group B",
+   *            "value": "234 tasks @ 507 MB avg"
+   *          },
+   *          {
+   *            "name": "Number of tasks",
+   *            "value": "470"
+   *          }
+   *        ]
+   *      },
+   *      {
+   *        "name": "Mapper GC",
+   *        "severity": "None",
+   *        "details": [
+   *          {
+   *            "name": "Avg task CPU time (ms)",
+   *            "value": "111717"
+   *          },
+   *          {
+   *            "name": "Avg task GC time (ms)",
+   *            "value": "3197"
+   *          },
+   *          {
+   *            "name": "Avg task runtime (ms)",
+   *            "value": "105633"
+   *          },
+   *          {
+   *            "name": "Number of tasks",
+   *            "value": "470"
+   *          },
+   *          {
+   *            "name": "Task GC\/CPU ratio",
+   *            "value": "0.028616951762041588"
+   *          }
+   *        ]
+   *      }..
+   *    ]
+   *  }
+   *}
+   * </pre>
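+   *
+   * Illustrative request (path and parameter name are assumptions; a "job_*" id is
+   * also accepted and is normalized to "application_*" below):
+   * <pre>
+   *   GET /rest/applications?applicationid=application_id
+   * </pre>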
+   */
+  public static Result restApplicationFromApplicationId(String applicationid) {
+
+    if (applicationid == null || applicationid.isEmpty()) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.APPLICATIONS, new JsonObject());
+      return notFound(new Gson().toJson(parent));
+    }
+
+    if (applicationid.startsWith("job")) {
+      applicationid = applicationid.replaceAll("job", "application");
+    }
+
+    JsonObject applicationObject = new JsonObject();
+    JsonArray heuristicsArray = new JsonArray();
+
+    AppResult result = getAppResultFromApplicationId(applicationid);
+
+    if (result == null) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.APPLICATIONS, new JsonObject());
+      return notFound(new Gson().toJson(parent));
+    }
+
+    for (AppHeuristicResult appHeuristicResult : result.yarnAppHeuristicResults) {
+      JsonArray detailsArray = new JsonArray();
+      JsonObject heuristicResultObject = new JsonObject();
+      for (AppHeuristicResultDetails details : appHeuristicResult.yarnAppHeuristicResultDetails) {
+        JsonObject detailsObject = new JsonObject();
+        detailsObject.addProperty(JsonKeys.NAME, details.name);
+        detailsObject.addProperty(JsonKeys.VALUE, details.value);
+        detailsObject.addProperty(JsonKeys.DETAILS, details.details);
+        detailsArray.add(detailsObject);
+      }
+      heuristicResultObject.addProperty(JsonKeys.NAME, appHeuristicResult.heuristicName);
+      heuristicResultObject.addProperty(JsonKeys.SEVERITY, appHeuristicResult.severity.getText());
+      heuristicResultObject.add(JsonKeys.DETAILS, detailsArray);
+      heuristicsArray.add(heuristicResultObject);
+    }
+
+    applicationObject.addProperty(JsonKeys.ID, result.id);
+    applicationObject.addProperty(JsonKeys.USERNAME, result.username);
+    applicationObject.addProperty(JsonKeys.JOB_TYPE, result.jobType);
+    applicationObject.addProperty(JsonKeys.MAPREDUCE_JOB_NAME, result.jobName);
+    applicationObject.addProperty(JsonKeys.START_TIME, result.startTime);
+    applicationObject.addProperty(JsonKeys.FINISH_TIME, result.finishTime);
+    applicationObject.addProperty(JsonKeys.RUNTIME, result.finishTime - result.startTime);
+    applicationObject.addProperty(JsonKeys.WAITTIME, result.totalDelay);
+    applicationObject.addProperty(JsonKeys.RESOURCE_USED, result.resourceUsed);
+    applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, result.resourceWasted);
+    applicationObject.addProperty(JsonKeys.SEVERITY, result.severity.getText());
+    applicationObject.addProperty(JsonKeys.TRACKING_URL, result.trackingUrl);
+    applicationObject.addProperty(JsonKeys.JOB_EXEC_ID, result.jobExecId);
+    applicationObject.addProperty(JsonKeys.JOB_DEF_ID, result.jobDefId);
+    applicationObject.addProperty(JsonKeys.FLOW_EXEC_ID, result.flowExecId);
+    applicationObject.addProperty(JsonKeys.FLOW_DEF_ID, result.flowDefId);
+    applicationObject.addProperty(JsonKeys.QUEUE, result.queueName);
+
+    applicationObject.add(JsonKeys.YARN_APP_HEURISTIC_RESULTS, heuristicsArray);
+
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.APPLICATIONS, applicationObject);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * This returns the rest search options which populate the forms on the search page.
+   * @return Returns the json object which should be filled in the search form
+   * return object:
+   * <pre>
+   *  *{
+   *  "search-options": {
+   *    "jobcategory": [
+   *      {
+   *        "name": "SPARK",
+   *        "jobtypes": [
+   *          {
+   *            "name": "Spark"
+   *          }
+   *        ],
+   *        "heuristics": [
+   *          {
+   *            "name": "Spark Configuration Best Practice"
+   *          },
+   *          {
+   *            "name": "Spark Memory Limit"
+   *          },
+   *          {
+   *            "name": "Spark Stage Runtime"
+   *          },
+   *          {
+   *            "name": "Spark Job Runtime"
+   *          },
+   *          {
+   *            "name": "Spark Executor Load Balance"
+   *          },
+   *          {
+   *            "name": "Spark Event Log Limit"
+   *          }
+   *        ]
+   *      },
+   *      {
+   *        "name": "MAPREDUCE",
+   *        "jobtypes": [
+   *          {
+   *            "name": "Pig"
+   *          },
+   *          {
+   *            "name": "Hive"
+   *          },
+   *          {
+   *            "name": "Cascading"
+   *          },
+   *          {
+   *            "name": "Voldemort"
+   *          },
+   *          {
+   *            "name": "Kafka"
+   *          },
+   *          {
+   *            "name": "HadoopJava"
+   *          }
+   *        ],
+   *        "heuristics": [
+   *          {
+   *            "name": "Mapper Data Skew"
+   *          },
+   *          {
+   *            "name": "Mapper GC"
+   *          },
+   *          {
+   *            "name": "Mapper Time"
+   *          },
+   *          {
+   *            "name": "Mapper Speed"
+   *          },
+   *          {
+   *            "name": "Mapper Spill"
+   *          },
+   *          {
+   *            "name": "Mapper Memory"
+   *          },
+   *          {
+   *            "name": "Reducer Data Skew"
+   *          },
+   *          {
+   *            "name": "Reducer GC"
+   *          },
+   *          {
+   *            "name": "Reducer Time"
+   *          },
+   *          {
+   *            "name": "Reducer Memory"
+   *          },
+   *          {
+   *            "name": "Shuffle & Sort"
+   *          },
+   *          {
+   *            "name": "Exception"
+   *          }
+   *        ]
+   *      }
+   *    ],
+   *    "severities": [
+   *      {
+   *        "name": "Critical",
+   *        "value": 4
+   *      },
+   *      {
+   *        "name": "Severe",
+   *        "value": 3
+   *      },
+   *      {
+   *        "name": "Moderate",
+   *        "value": 2
+   *      },
+   *      {
+   *        "name": "Low",
+   *        "value": 1
+   *      },
+   *      {
+   *        "name": "None",
+   *        "value": 0
+   *      }
+   *    ],
+   *    "id": "search"
+   *  }
+   *}
+   * </pre>
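+   *
+   * Illustrative request (path assumed; the endpoint takes no parameters):
+   * <pre>
+   *   GET /rest/searchoptions
+   * </pre>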
+   */
+  public static Result restSearchOptions() {
+    JsonObject searchOptions = new JsonObject();
+    JsonArray jobCategory = new JsonArray();
+    JsonArray severities = new JsonArray();
+
+    Map<ApplicationType, List<JobType>> applicationTypeListMap = ElephantContext.instance().getAppTypeToJobTypes();
+
+    for (ApplicationType key : applicationTypeListMap.keySet()) {
+      JsonObject applicationType = new JsonObject();
+      JsonArray jobTypes = new JsonArray();
+      JsonArray heuristics = new JsonArray();
+
+      for (JobType jobtype : applicationTypeListMap.get(key)) {
+        JsonObject jobTypeNode = new JsonObject();
+        jobTypeNode.addProperty(JsonKeys.NAME, jobtype.getName());
+        jobTypes.add(jobTypeNode);
+      }
+
+      for (Heuristic heuristic : ElephantContext.instance().getHeuristicsForApplicationType(key)) {
+        JsonObject heuristicNode = new JsonObject();
+        heuristicNode.addProperty(JsonKeys.NAME, heuristic.getHeuristicConfData().getHeuristicName());
+        heuristics.add(heuristicNode);
+      }
+
+      applicationType.addProperty(JsonKeys.NAME, key.getName());
+      applicationType.add(JsonKeys.JOB_TYPES, jobTypes);
+      applicationType.add(JsonKeys.HEURISTICS, heuristics);
+      jobCategory.add(applicationType);
+    }
+
+    for (Severity severity : Severity.values()) {
+      JsonObject severityObject = new JsonObject();
+      severityObject.addProperty(JsonKeys.NAME, severity.getText());
+      severityObject.addProperty(JsonKeys.VALUE, severity.getValue());
+      severities.add(severityObject);
+    }
+
+    searchOptions.add(JsonKeys.JOB_CATEGORY, jobCategory);
+    searchOptions.add(JsonKeys.SEVERITIES, severities);
+    searchOptions.addProperty(JsonKeys.ID, "search");
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.SEARCH_OPTS, searchOptions);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * Returns the search results for the given query
+   * @return
+   * JsonObject:
+   *
+   * <pre>
+   *   {
+   *         search-results: {
+   *         id: "id"
+   *         start: 0,
+   *         end: 20,
+   *         total: 0,
+   *         summaries: [
+   *                  {
+   *                    application_summary_object
+   *                  }
+   *                ]
+   *          }
+   *  }
+   * </pre>
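+   *
+   * Illustrative request (path assumed; offset and limit are optional and are
+   * clamped below to the configured bounds):
+   * <pre>
+   *   GET /rest/search?username=user&offset=0&limit=20
+   * </pre>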
+   */
+  public static Result search() {
+    DynamicForm form = Form.form().bindFromRequest(request());
+    JsonObject parent = new JsonObject();
+
+    int offset = SEARCH_DEFAULT_PAGE_OFFSET;
+    int limit = SEARCH_DEFAULT_PAGE_LIMIT;
+    int end = 0;
+    int total = 0;
+
+    if (form.get("offset") != null && !form.get("offset").isEmpty()) {
+      offset = Integer.valueOf(form.get("offset"));
+    }
+
+    if (form.get("limit") != null && !form.get("limit").isEmpty()) {
+      limit = Integer.valueOf(form.get("limit"));
+    }
+
+    if (offset < 0) {
+      offset = 0;
+    }
+
+    if (limit > SEARCH_APPLICATION_MAX_OFFSET) {
+      limit = SEARCH_APPLICATION_MAX_OFFSET;
+    } else if (limit <= 0) {
+      return ok(new Gson().toJson(parent));
+    }
+
+    Query<AppResult> query =
+        Application.generateSearchQuery(AppResult.getSearchFields(), Application.getSearchParams());
+
+    total = query.findRowCount();
+
+    if (offset > total) {
+      offset = total;
+    }
+
+    List<AppResult> results = query.setFirstRow(offset).setMaxRows(limit)
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()).findList();
+
+    end = offset + results.size();
+
+    JsonArray applicationSummaryArray = new JsonArray();
+
+    for (AppResult application : results) {
+      JsonObject applicationObject = new JsonObject();
+      JsonArray heuristicsArray = new JsonArray();
+      List<AppHeuristicResult> appHeuristicResult = application.yarnAppHeuristicResults;
+
+      for (AppHeuristicResult heuristic : appHeuristicResult) {
+        JsonObject heuristicObject = new JsonObject();
+        heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
+        heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
+        heuristicsArray.add(heuristicObject);
+      }
+
+      applicationObject.addProperty(JsonKeys.ID, application.id);
+      applicationObject.addProperty(JsonKeys.USERNAME, application.username);
+      applicationObject.addProperty(JsonKeys.START_TIME, application.startTime);
+      applicationObject.addProperty(JsonKeys.FINISH_TIME, application.finishTime);
+      applicationObject.addProperty(JsonKeys.RUNTIME, application.finishTime - application.startTime);
+      applicationObject.addProperty(JsonKeys.WAITTIME, application.totalDelay);
+      applicationObject.addProperty(JsonKeys.RESOURCE_USED, application.resourceUsed);
+      applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, application.resourceWasted);
+      applicationObject.addProperty(JsonKeys.SEVERITY, application.severity.getText());
+      applicationObject.addProperty(JsonKeys.QUEUE, application.queueName);
+
+      applicationObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
+      applicationSummaryArray.add(applicationObject);
+    }
+
+    JsonObject searchResults = new JsonObject();
+    searchResults.addProperty(JsonKeys.ID, query.toString());
+    searchResults.addProperty(JsonKeys.START, offset);
+    searchResults.addProperty(JsonKeys.END, end);
+    searchResults.addProperty(JsonKeys.TOTAL, total);
+    searchResults.add(JsonKeys.SUMMARIES, applicationSummaryArray);
+    parent.add(JsonKeys.SEARCH_RESULTS, searchResults);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * Returns the filter parameters for the user summary
+   * @return The filter parameters for the user summary
+   */
+  public static Map<String, String> getFilterParamsForUserSummary() {
+    DynamicForm form = Form.form().bindFromRequest(request());
+    Map<String, String> filterParams = new HashMap<String, String>();
+    filterParams.put(Application.FINISHED_TIME_BEGIN, form.get(Application.FINISHED_TIME_BEGIN));
+    filterParams.put(Application.FINISHED_TIME_END, form.get(Application.FINISHED_TIME_END));
+    filterParams.put(Application.STARTED_TIME_BEGIN, form.get(Application.STARTED_TIME_BEGIN));
+    filterParams.put(Application.STARTED_TIME_END, form.get(Application.STARTED_TIME_END));
+    return filterParams;
+  }
+
+  /**
+   * The rest interface to return the results for a particular user. When the date is not specified, it returns
+   * results for the last seven days.
+   * @return The json object of the form:
+   * <pre>
+   * {
+   *   "user-details": {
+   *     "id": "user",
+   *     "totalapplications": 3,
+   *     "totaljobs": 3,
+   *     "totalworkflows": 3,
+   *     "resourceused": 101394532,
+   *     "resourcewasted": 15999828,
+   *     "runtime": 312283,
+   *     "waittime": 46234,
+   *     "start": 0,
+   *     "end": 3,
+   *     "total": 3,
+   *     "summaries": [
+   *       {
+   *         "id": "application_12432132131",
+   *         "username": "user",
+   *         "starttime": 1477389986871,
+   *         "finishtime": 1477390004463,
+   *         "runtime": 17592,
+   *         "waittime": 0,
+   *         "resourceused": 12288,
+   *         "resourcewasted": 6360,
+   *         "severity": "Critical",
+   *         "queue": "spark_default",
+   *         "heuristicsummary": [
+   *           {
+   *             "name": "Spark Configuration Best Practice",
+   *             "severity": "None"
+   *           },
+   *           {
+   *             "name": "Spark Memory Limit",
+   *             "severity": "None"
+   *           },
+   *           {
+   *             "name": "Spark Stage Runtime",
+   *             "severity": "Low"
+   *           },
+   *           {
+   *             "name": "Spark Job Runtime",
+   *             "severity": "Low"
+   *           },
+   *           {
+   *             "name": "Spark Executor Load Balance",
+   *             "severity": "Critical"
+   *           },
+   *           {
+   *             "name": "Spark Event Log Limit",
+   *             "severity": "None"
+   *           }
+   *         ]
+   *       }
+   *     ]
+   *   }
+   * }
+   * </pre>
+   */
+  public static Result restGetUsersSummaryStats() {
+    DynamicForm form = Form.form().bindFromRequest(request());
+    int offset = SEARCH_DEFAULT_PAGE_OFFSET;
+    int limit = SEARCH_DEFAULT_PAGE_LIMIT;
+    int end = 0;
+    int total = 0;
+
+    if (form.get("offset") != null && !form.get("offset").isEmpty()) {
+      offset = Integer.valueOf(form.get("offset"));
+    }
+
+    if (form.get("limit") != null && !form.get("limit").isEmpty()) {
+      limit = Integer.valueOf(form.get("limit"));
+    }
+
+    if (offset < 0) {
+      offset = 0;
+    }
+
+    if (limit > SEARCH_APPLICATION_MAX_OFFSET) {
+      limit = SEARCH_APPLICATION_MAX_OFFSET;
+    } else if (limit <= 0) {
+      return ok(new Gson().toJson(new JsonObject()));
+    }
+
+    String sortBy = "severity";
+    boolean increasing = true;
+
+    String usernameString = form.get("usernames");
+    if (usernameString == null || usernameString.isEmpty()) {
+      JsonObject parent = new JsonObject();
+      parent.add(JsonKeys.USER_RESULTS, new JsonObject());
+      return notFound(new Gson().toJson(parent));
+    }
+
+    List<String> usernames = Arrays.asList(usernameString.split(","));
+
+    Map<String, String> filterParamsForUserSummary = getFilterParamsForUserSummary();
+
+    if (form.get("sortKey") != null) {
+      sortBy = form.get("sortKey");
+    }
+
+    if (form.get("increasing") != null) {
+      increasing = Boolean.valueOf(form.get("increasing"));
+    }
+
+    JsonObject userResult = new JsonObject();
+    List<String> usernameQueryList = new ArrayList<String>();
+    for (int i = 0; i < usernames.size(); i++) {
+      usernameQueryList.add("username=:user" + i);
+    }
+
+    String usernameQueryString = StringUtils.join(usernameQueryList, " or ");
+
+    // by default, fetch data from last week
+    String finishedTimeBegin = String.valueOf(System.currentTimeMillis() - DAY * 7); // week of data if not specified
+    String finishedTimeEnd = String.valueOf(System.currentTimeMillis());
+
+    if (Utils.isSet(filterParamsForUserSummary.get(Application.FINISHED_TIME_BEGIN))) {
+      finishedTimeBegin = filterParamsForUserSummary.get(Application.FINISHED_TIME_BEGIN);
+    }
+
+    if (Utils.isSet(filterParamsForUserSummary.get(Application.FINISHED_TIME_END))) {
+      finishedTimeEnd = filterParamsForUserSummary.get(Application.FINISHED_TIME_END);
+    }
+
+    StringBuilder timeFilterStringBuilder = new StringBuilder();
+    if (finishedTimeBegin != null) {
+      timeFilterStringBuilder.append("finish_time");
+      timeFilterStringBuilder.append(">=");
+      timeFilterStringBuilder.append(parseTime(String.valueOf(finishedTimeBegin)));
+      if (finishedTimeEnd != null) {
+        timeFilterStringBuilder.append(" and ");
+      }
+    }
+
+    if (finishedTimeEnd != null) {
+      timeFilterStringBuilder.append("finish_time");
+      timeFilterStringBuilder.append("<=");
+      timeFilterStringBuilder.append(parseTime(String.valueOf(finishedTimeEnd)));
+    }
+
+    String timeFilterString = timeFilterStringBuilder.toString();
+
+    String sql;
+    StringBuilder sqlBuilder = new StringBuilder();
+    sqlBuilder.append(
+        "select count(id) as num_of_applications, count(distinct(job_exec_id)) as num_of_jobs, count(distinct(flow_exec_id)) as num_of_flows, sum(resource_used) as total_resource_used, sum(resource_wasted) as total_resource_wasted, sum(finish_time) - sum(start_time) as execution_time, sum(total_delay) as total_delay from yarn_app_result where");
+    if (timeFilterString != null && !timeFilterString.isEmpty()) {
+      sqlBuilder.append(" ( ");
+      sqlBuilder.append(usernameQueryString);
+      sqlBuilder.append(" ) and ");
+      sqlBuilder.append(timeFilterString);
+    } else {
+      sqlBuilder.append(" ");
+      sqlBuilder.append(usernameQueryString);
+    }
+
+    sql = sqlBuilder.toString();
+    SqlQuery query = Ebean.createSqlQuery(sql);
+
+    int iUserIndex = 0;
+    for (String username : usernames) {
+      query.setParameter("user" + iUserIndex, username);
+      iUserIndex++;
+    }
+
+    SqlRow resultRow = query.findUnique();
+    userResult.addProperty(JsonKeys.ID, usernameString);
+    userResult.addProperty(JsonKeys.TOTAL_APPLICATIONS, resultRow.getLong("num_of_applications"));
+    userResult.addProperty(JsonKeys.TOTAL_JOBS, resultRow.getLong("num_of_jobs"));
+    userResult.addProperty(JsonKeys.TOTAL_WORKFLOWS, resultRow.getLong("num_of_flows"));
+    userResult.addProperty(JsonKeys.RESOURCE_USED, resultRow.getLong("total_resource_used"));
+    userResult.addProperty(JsonKeys.RESOURCE_WASTED, resultRow.getLong("total_resource_wasted"));
+    userResult.addProperty(JsonKeys.RUNTIME, resultRow.getLong("execution_time"));
+    userResult.addProperty(JsonKeys.WAITTIME, resultRow.getLong("total_delay"));
+
+    Query<AppResult> userSummaryQuery =
+        generateUserApplicationSummaryQuery(usernames, filterParamsForUserSummary, sortBy, increasing);
+
+    total = userSummaryQuery.findRowCount();
+
+    List<AppResult> results = userSummaryQuery.setFirstRow(offset).setMaxRows(limit)
+        .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()).findList();
+
+    end = offset + results.size();
+
+    JsonArray applicationSummaryArray = new JsonArray();
+
+    for (AppResult application : results) {
+      JsonObject applicationObject = new JsonObject();
+      JsonArray heuristicsArray = new JsonArray();
+      List<AppHeuristicResult> appHeuristicResult = application.yarnAppHeuristicResults;
+
+      for (AppHeuristicResult heuristic : appHeuristicResult) {
+        JsonObject heuristicObject = new JsonObject();
+        heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
+        heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
+        heuristicsArray.add(heuristicObject);
+      }
+
+      applicationObject.addProperty(JsonKeys.ID, application.id);
+      applicationObject.addProperty(JsonKeys.USERNAME, application.username);
+      applicationObject.addProperty(JsonKeys.START_TIME, application.startTime);
+      applicationObject.addProperty(JsonKeys.FINISH_TIME, application.finishTime);
+      applicationObject.addProperty(JsonKeys.RUNTIME, application.finishTime - application.startTime);
+      applicationObject.addProperty(JsonKeys.WAITTIME, application.totalDelay);
+      applicationObject.addProperty(JsonKeys.RESOURCE_USED, application.resourceUsed);
+      applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, application.resourceWasted);
+      applicationObject.addProperty(JsonKeys.SEVERITY, application.severity.getText());
+      applicationObject.addProperty(JsonKeys.QUEUE, application.queueName);
+
+      applicationObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
+      applicationSummaryArray.add(applicationObject);
+    }
+
+    userResult.addProperty(JsonKeys.START, offset);
+    userResult.addProperty(JsonKeys.END, end);
+    userResult.addProperty(JsonKeys.TOTAL, total);
+    userResult.add(JsonKeys.SUMMARIES, applicationSummaryArray);
+
+    JsonObject parent = new JsonObject();
+    parent.add(JsonKeys.USER_DETAILS, userResult);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * Returns the status of the exception feature.
+   * @return JsonObject corresponding to the exception status
+   * response:
+   * <pre>
+   * {
+   *   "exception-statuses": {
+   *     "exceptionenabled": "true",
+   *     "schedulers": [
+   *       {
+   *         "name": "azkaban"
+   *       }
+   *     ],
+   *     "id": "exception-status"
+   *   }
+   * }
+   * </pre>
+   */
+  public static Result restExceptionStatuses() {
+    JsonObject parent = new JsonObject();
+    Set<String> schedulersConfigured = InfoExtractor.getSchedulersConfiguredForException();
+    JsonObject exception = new JsonObject();
+    if (schedulersConfigured.isEmpty()) {
+      exception.addProperty(JsonKeys.EXCEPTION_ENABLED, "true");
+      exception.add(JsonKeys.SCHEDULERS, new JsonArray());
+
+      exception.addProperty(JsonKeys.ID, "exception-status");
+      parent.add(JsonKeys.EXCEPTION_STATUSES, exception);
+      return ok(new Gson().toJson(parent));
+    }
+
+    JsonArray schedulers = new JsonArray();
+    for (String scheduler : schedulersConfigured) {
+      JsonObject schedulerObject = new JsonObject();
+      schedulerObject.addProperty(JsonKeys.NAME, scheduler);
+      schedulers.add(schedulerObject);
+    }
+    exception.addProperty(JsonKeys.EXCEPTION_ENABLED, "true");
+    exception.add(JsonKeys.SCHEDULERS, schedulers);
+    exception.addProperty(JsonKeys.ID, "exception-status");
+    parent.add(JsonKeys.EXCEPTION_STATUSES, exception);
+    return ok(new Gson().toJson(parent));
+  }
+
+  /**
+   * Controls Exceptions
+   * @throws URISyntaxException
+   */
+  public static Result restExceptions() throws URISyntaxException, MalformedURLException, IOException,
+      AuthenticationException {
+    DynamicForm form = Form.form().bindFromRequest(request());
+    String url = form.get("flow-exec-url");
+    JsonObject parent = new JsonObject();
+
+    String scheduler = form.get("scheduler");
+
+    HadoopSecurity _hadoopSecurity = HadoopSecurity.getInstance();
+
+    logger.info(String.format("scheduler: %s", scheduler));
+    if (scheduler == null) {
+      scheduler = "azkaban";
+      logger.info(String.format("Setting scheduler to %s", scheduler));
+    }
+    if (!InfoExtractor.getSchedulersConfiguredForException().contains(scheduler)) {
+      logger.info("scheduler not found");
+      parent.add("workflow-exceptions", new JsonArray());
+      return status(503, "Service is currently unavailable");
+    }
+    if (url == null || url.isEmpty()) {
+      parent.add("workflow-exceptions", new JsonArray());
+      return notFound(new Gson().toJson(parent));
+    } else {
+      ExceptionFinder expGen = new ExceptionFinder(url, scheduler);
+      HadoopException flowException = expGen.getExceptions();
+
+      JsonArray jobsArray = new JsonArray();
+
+      if (!flowException.getChildExceptions().isEmpty()) {
+        for (HadoopException jobException : flowException.getChildExceptions()) {
+          JsonObject job = new JsonObject();
+          job.addProperty(JsonKeys.NAME, jobException.getId());
+          job.addProperty(JsonKeys.TYPE, jobException.getType().toString());
+          job.addProperty(JsonKeys.ID, jobException.getId());
+
+          // SCHEDULER and SCRIPT failures carry the same payload, so they are handled together
+          if (jobException.getType() == HadoopException.HadoopExceptionType.SCHEDULER
+              || jobException.getType() == HadoopException.HadoopExceptionType.SCRIPT) {
+            if (jobException.getLoggingEvent() != null && jobException.getLoggingEvent().getLog() != null) {
+              job.addProperty(JsonKeys.EXCEPTION_SUMMARY, getSchedulerLog(jobException.getLoggingEvent().getLog()));
+            } else {
+              job.addProperty(JsonKeys.EXCEPTION_SUMMARY, "");
+            }
+            job.addProperty(JsonKeys.STATUS, "failed");
+          }
+
+          JsonArray mrExceptionsArray = new JsonArray();
+          if (jobException.getType() == HadoopException.HadoopExceptionType.MR) {
+            for (HadoopException mrJobException : jobException.getChildExceptions()) {
+              JsonObject child = new JsonObject();
+              child.addProperty(JsonKeys.NAME, mrJobException.getId());
+              if (mrJobException.getLoggingEvent() != null && mrJobException.getLoggingEvent().getLog() != null) {
+                child.addProperty(JsonKeys.EXCEPTION_SUMMARY, getSchedulerLog(mrJobException.getLoggingEvent().getLog()));
+              } else {
+                child.addProperty(JsonKeys.EXCEPTION_SUMMARY, "");
+              }
+
+              JsonArray taskExceptionsArray = new JsonArray();
+              for (HadoopException mrTaskException : mrJobException.getChildExceptions()) {
+                JsonObject task = new JsonObject();
+                task.addProperty(JsonKeys.NAME, mrTaskException.getId());
+                if (mrTaskException.getLoggingEvent() != null && mrTaskException.getLoggingEvent().getLog() != null) {
+                  task.addProperty(JsonKeys.EXCEPTION_SUMMARY, getSchedulerLog(mrTaskException.getLoggingEvent().getLog()));
+                } else {
+                  task.addProperty(JsonKeys.EXCEPTION_SUMMARY, "");
+                }
+                taskExceptionsArray.add(task);
+              }
+              child.add(JsonKeys.TASKS, taskExceptionsArray);
+              mrExceptionsArray.add(child);
+            }
+
+            if (jobException.getChildExceptions().isEmpty()) {
+              JsonObject child = new JsonObject();
+              child.addProperty(JsonKeys.NAME, "");
+              child.add(JsonKeys.TASKS, new JsonArray());
+              child.addProperty(JsonKeys.EXCEPTION_SUMMARY, getSchedulerLog(jobException.getLoggingEvent().getLog()));
+              mrExceptionsArray.add(child);
+            }
+            job.add(JsonKeys.APPLICATIONS, mrExceptionsArray);
+            job.addProperty(JsonKeys.STATUS, "failed");
+          }
+          jobsArray.add(job);
+        }
+        parent.add(JsonKeys.WORKFLOW_EXCEPTIONS, jobsArray);
+        return ok(new Gson().toJson(parent));
+      }
+      parent.add(JsonKeys.WORKFLOW_EXCEPTIONS, jobsArray);
+      return ok(new Gson().toJson(parent));
+    }
+  }
+
+  /**
+   * Takes a list of lists of log lines and concatenates them into a single string
+   * @param logs The logs from the scheduler
+   * @return The scheduler logs as one newline-separated string
+   */
+  private static String getSchedulerLog(List<List<String>> logs) {
+    if (logs == null || logs.isEmpty()) {
+      return "";
+    }
+    StringBuilder builder = new StringBuilder();
+    for (List<String> lines : logs) {
+      for (String line : lines) {
+        builder.append(line);
+        builder.append("\n");
+      }
+    }
+    return builder.toString();
+  }
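+
+  /*
+   * Illustrative behaviour of getSchedulerLog (example only, not part of the
+   * original change): given logs = [["line1", "line2"], ["line3"]], the builder
+   * walks every inner list in order and the method returns "line1\nline2\nline3\n".
+   */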
+
+
+  /**
+   * Generates the query for returning the application summaries
+   * @param usernames The list of usernames
+   * @param searchParams Any additional parameters
+   * @param sortKey The key on which the applications should be sorted
+   * @param increasing Whether to sort ascending (true) or descending (false) on the sort key
+   * @return The Query object built from the given parameters
+   */
+  public static Query<AppResult> generateUserApplicationSummaryQuery(List<String> usernames,
+      Map<String, String> searchParams, String sortKey, boolean increasing) {
+    ExpressionList<AppResult> query = AppResult.find.select(AppResult.getSearchFields()).where();
+    Junction<AppResult> junction = query.disjunction();
+    for (String username : usernames) {
+      junction.eq(AppResult.TABLE.USERNAME, username);
+    }
+    query.endJunction();
+
+    String finishedTimeBegin = searchParams.get(Application.FINISHED_TIME_BEGIN);
+    if (!Utils.isSet(finishedTimeBegin)) {
+      finishedTimeBegin = String.valueOf(System.currentTimeMillis() - 7 * DAY); // week of data if not specified
+    }
+
+    long time = parseTime(finishedTimeBegin);
+    if (time > 0) {
+      query.ge(AppResult.TABLE.FINISH_TIME, time);
+    }
+
+    String finishedTimeEnd = searchParams.get(Application.FINISHED_TIME_END);
+    if (!Utils.isSet(finishedTimeEnd)) {
+      finishedTimeEnd = String.valueOf(System.currentTimeMillis());
+    }
+
+    time = parseTime(finishedTimeEnd);
+    if (time > 0) {
+      query.le(AppResult.TABLE.FINISH_TIME, time);
+    }
+
+    if (increasing) {
+      return query.order(getSortKey(sortKey));
+    } else {
+      return query.order().desc(getSortKey(sortKey));
+    }
+  }
+
+  /**
+   * Maps the sort key to the actual database field
+   * @param sortKey The sort key
+   * @return The field name for the given sort key
+   */
+  private static String getSortKey(String sortKey) {
+    if (sortKey.equals("severity")) {
+      return AppResult.TABLE.SEVERITY;
+    } else if (sortKey.equals("resourceUsed")) {
+      return AppResult.TABLE.RESOURCE_USAGE;
+    } else if (sortKey.equals("resourceWasted")) {
+      return AppResult.TABLE.WASTED_RESOURCES;
+    } else if (sortKey.equals("delay")) {
+      return AppResult.TABLE.TOTAL_DELAY;
+    } else if (sortKey.equals("finish_time")) {
+      return AppResult.TABLE.FINISH_TIME;
+    }
+    return "severity";
+  }
+
+  /**
+   * This utility method sorts the given jsonArray by finish time, most recent first
+   * @param jsonArray The jsonArray to be sorted
+   * @return The jsonArray sorted by finish time
+   */
+  private static JsonArray getSortedJsonArrayByFinishTime(JsonArray jsonArray) {
+    JsonArray sortedJsonArray = new JsonArray();
+    List<JsonObject> jsonValues = new ArrayList<JsonObject>();
+    for (int i = 0; i < jsonArray.size(); i++) {
+      jsonValues.add(jsonArray.get(i).getAsJsonObject());
+    }
+    Collections.sort(jsonValues, new Comparator<JsonObject>() {
+      public int compare(JsonObject a, JsonObject b) {
+        Long first = a.get(JsonKeys.FINISH_TIME).getAsLong();
+        Long second = b.get(JsonKeys.FINISH_TIME).getAsLong();
+        return second.compareTo(first);
+      }
+    });
+    for (JsonObject object : jsonValues) {
+      sortedJsonArray.add(object);
+    }
+    return sortedJsonArray;
+  }
+
+  private static List<Severity> getSortedSeverityKeys(Set<Severity> severities) {
+    List<Severity> severityList = new ArrayList<Severity>();
+    severityList.addAll(severities);
+    Collections.sort(severityList, new Comparator<Severity>() {
+      public int compare(Severity a, Severity b) {
+        return b.getValue() - a.getValue();
+      }
+    });
+    return severityList;
+  }
+
+  /**
+   * Parses the given string into an epoch value
+   *
+   * @param time The string to be parsed
+   * @return the epoch value, or 0 if the string cannot be parsed
+   */
+  private static long parseTime(String time) {
+    long unixTime = 0;
+    try {
+      unixTime = Long.parseLong(time);
} catch (NumberFormatException ex) { + // return 0 + } + return unixTime; + } +} diff --git a/app/model/JobHeuristicResult.java b/app/model/JobHeuristicResult.java deleted file mode 100644 index c7bd3c8ec..000000000 --- a/app/model/JobHeuristicResult.java +++ /dev/null @@ -1,42 +0,0 @@ -package model; - -import com.fasterxml.jackson.annotation.JsonBackReference; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.util.Utils; - -import play.db.ebean.Model; - -import javax.persistence.*; - -@Entity -public class JobHeuristicResult extends Model { - - private static final long serialVersionUID = 123L; - - @JsonIgnore - @Id - public int id; - - @JsonBackReference - @ManyToOne(cascade = CascadeType.ALL) - public JobResult job; - - @Column - public Severity severity; - - @Column - public String analysisName; - - @JsonIgnore - @Lob - public String data; - - @JsonIgnore - @Column - public int dataColumns; - - public String[][] getDataArray() { - return Utils.parseCsvLines(data); - } -} diff --git a/app/model/JobResult.java b/app/model/JobResult.java deleted file mode 100644 index 6b1a65959..000000000 --- a/app/model/JobResult.java +++ /dev/null @@ -1,63 +0,0 @@ -package model; - -import com.fasterxml.jackson.annotation.JsonManagedReference; -import com.linkedin.drelephant.analysis.Severity; - -import play.db.ebean.Model; -import javax.persistence.*; - -import java.util.List; - -@Entity -public class JobResult extends Model { - - private static final long serialVersionUID = 1L; - public static final int URL_LEN_LIMIT = 2048; - - @Id - @Column(length = 50) - public String job_id; - - @Column(length = 50) - public String username; - - @Column(length = 100) - public String jobName; - - @Column - public long startTime; - - @Column - public long analysisTime; - - @Column - public Severity severity; - - @Column - public JobType jobType; - - @Column(length = 200) - public String url; - - @Column(length = 100) - public String cluster; - - @Column(length = URL_LEN_LIMIT) - public String jobExecUrl; - - @Column(length = URL_LEN_LIMIT) - public String jobUrl; - - @Column(length = URL_LEN_LIMIT) - public String flowExecUrl; - - @Column(length = URL_LEN_LIMIT) - public String flowUrl; - - @JsonManagedReference - @OneToMany(cascade = CascadeType.ALL, mappedBy = "job") - public List heuristicResults; - - public static Finder find = new Finder( - String.class, JobResult.class); -} diff --git a/app/model/JobType.java b/app/model/JobType.java deleted file mode 100644 index cc17a7d38..000000000 --- a/app/model/JobType.java +++ /dev/null @@ -1,24 +0,0 @@ -package model; - -import com.avaje.ebean.annotation.EnumValue; - -public enum JobType { - @EnumValue("Hadoop") - HADOOPJAVA("HadoopJava"), - - @EnumValue("Pig") - PIG("Pig"), - - @EnumValue("Hive") - HIVE("Hive"); - - private String text; - - private JobType(String text) { - this.text = text; - } - - public String getText() { - return text; - } -} diff --git a/app/model/StringResult.java b/app/model/StringResult.java deleted file mode 100644 index 150122572..000000000 --- a/app/model/StringResult.java +++ /dev/null @@ -1,19 +0,0 @@ -package model; - -import javax.persistence.Entity; - -import com.avaje.ebean.annotation.Sql; - -@Entity -@Sql -public class StringResult { - String string; - - public void setString(String val) { - string = val; - } - - public String getString() { - return string; - } -} \ No newline at end of file diff --git a/app/models/AppHeuristicResult.java 
b/app/models/AppHeuristicResult.java new file mode 100644 index 000000000..96db29e19 --- /dev/null +++ b/app/models/AppHeuristicResult.java @@ -0,0 +1,86 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package models; + +import com.fasterxml.jackson.annotation.JsonManagedReference; +import java.util.List; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; + +import javax.persistence.OneToMany; +import javax.persistence.Table; + +import com.fasterxml.jackson.annotation.JsonBackReference; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.util.Utils; + +import play.db.ebean.Model; + + +@Entity +@Table(name = "yarn_app_heuristic_result") +public class AppHeuristicResult extends Model { + + private static final long serialVersionUID = 2L; + + public static final int HEURISTIC_NAME_LIMIT = 128; + public static final int HEURISTIC_CLASS_LIMIT = 255; + + public static class TABLE { + public static final String TABLE_NAME = "yarn_app_heuristic_result"; + public static final String ID = "id"; + public static final String APP_RESULT_ID = "yarnAppResult"; + public static final String HEURISTIC_NAME = "heuristicName"; + public static final String SEVERITY = "severity"; + public static final String SCORE = "score"; + public static final String APP_HEURISTIC_RESULT_DETAILS = "yarnAppHeuristicResultDetails"; + } + + public static String getSearchFields() { + return Utils.commaSeparated(AppHeuristicResult.TABLE.HEURISTIC_NAME, AppHeuristicResult.TABLE.SEVERITY); + } + + @JsonIgnore + @Id + public int id; + + @JsonBackReference + @ManyToOne(cascade = CascadeType.ALL) + public AppResult yarnAppResult; + + @Column(length = HEURISTIC_CLASS_LIMIT, nullable = false) + public String heuristicClass; + + @Column(length = HEURISTIC_NAME_LIMIT, nullable = false) + public String heuristicName; + + @Column(nullable = false) + public Severity severity; + + @Column(nullable = false) + public int score; + + @JsonManagedReference + @OneToMany(cascade = CascadeType.ALL, mappedBy = "yarnAppHeuristicResult") + public List<AppHeuristicResultDetails> yarnAppHeuristicResultDetails; + +} \ No newline at end of file diff --git a/app/models/AppHeuristicResultDetails.java b/app/models/AppHeuristicResultDetails.java new file mode 100644 index 000000000..3af96cb24 --- /dev/null +++ b/app/models/AppHeuristicResultDetails.java @@ -0,0 +1,64 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package models; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Embeddable; +import javax.persistence.EmbeddedId; +import javax.persistence.Entity; +import javax.persistence.ManyToOne; + +import javax.persistence.Table; + +import com.fasterxml.jackson.annotation.JsonBackReference; + +import play.db.ebean.Model; + + +@Entity +@Table(name = "yarn_app_heuristic_result_details") +public class AppHeuristicResultDetails extends Model { + + private static final long serialVersionUID = 3L; + + public static final int NAME_LIMIT = 128; + public static final int VALUE_LIMIT = 255; + public static final int DETAILS_LIMIT = 65535; + + public static class TABLE { + public static final String TABLE_NAME = "yarn_app_heuristic_result_details"; + public static final String APP_HEURISTIC_RESULT_ID = "yarnAppHeuristicResult"; + public static final String NAME = "name"; + public static final String VALUE = "value"; + public static final String DETAILS = "details"; + } + + @JsonBackReference + @ManyToOne(cascade = CascadeType.ALL) + public AppHeuristicResult yarnAppHeuristicResult; + + @Column(length=NAME_LIMIT, nullable = false) + public String name; + + @Column(length=VALUE_LIMIT, nullable = false) + public String value; + + @Column(nullable = true) + public String details; +} diff --git a/app/models/AppResult.java b/app/models/AppResult.java new file mode 100644 index 000000000..e7c9c7f6c --- /dev/null +++ b/app/models/AppResult.java @@ -0,0 +1,167 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package models; + +import com.fasterxml.jackson.annotation.JsonManagedReference; +import com.linkedin.drelephant.analysis.Severity; + +import com.linkedin.drelephant.util.Utils; +import java.util.Date; +import play.db.ebean.Model; + +import java.util.List; + +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; + + +@Entity +@Table(name = "yarn_app_result") +public class AppResult extends Model { + + private static final long serialVersionUID = 1L; + + public static final int ID_LIMIT = 50; + public static final int USERNAME_LIMIT = 50; + public static final int QUEUE_NAME_LIMIT = 50; + public static final int APP_NAME_LIMIT = 100; + public static final int JOB_NAME_LIMIT = 255; + public static final int TRACKING_URL_LIMIT = 255; + public static final int JOBTYPE_LIMIT = 20; + public static final int SCHEDULER_LIMIT = 20; + public static final int URL_LEN_LIMIT = 800; + public static final int FLOW_EXEC_ID_LIMIT = 255; + + // Note that the Table column constants are actually the java variable names defined in this model. + // This is because ebean operations require the model variable names to be passed as strings. + public static class TABLE { + public static final String TABLE_NAME = "yarn_app_result"; + public static final String ID = "id"; + public static final String NAME = "name"; + public static final String USERNAME = "username"; + public static final String QUEUE_NAME = "queueName"; + public static final String START_TIME = "startTime"; + public static final String FINISH_TIME = "finishTime"; + public static final String TRACKING_URL = "trackingUrl"; + public static final String JOB_TYPE = "jobType"; + public static final String SEVERITY = "severity"; + public static final String SCORE = "score"; + public static final String WORKFLOW_DEPTH = "workflowDepth"; + public static final String SCHEDULER = "scheduler"; + public static final String JOB_NAME = "jobName"; + public static final String JOB_EXEC_ID = "jobExecId"; + public static final String FLOW_EXEC_ID = "flowExecId"; + public static final String JOB_DEF_ID = "jobDefId"; + public static final String FLOW_DEF_ID = "flowDefId"; + public static final String JOB_EXEC_URL = "jobExecUrl"; + public static final String FLOW_EXEC_URL = "flowExecUrl"; + public static final String JOB_DEF_URL = "jobDefUrl"; + public static final String FLOW_DEF_URL = "flowDefUrl"; + public static final String APP_HEURISTIC_RESULTS = "yarnAppHeuristicResults"; + public static final String RESOURCE_USAGE = "resourceUsed"; + public static final String WASTED_RESOURCES = "resourceWasted"; + public static final String TOTAL_DELAY = "totalDelay"; + } + + public static String getSearchFields() { + return Utils.commaSeparated(AppResult.TABLE.NAME, AppResult.TABLE.USERNAME, TABLE.QUEUE_NAME, AppResult.TABLE.JOB_TYPE, + AppResult.TABLE.SEVERITY, AppResult.TABLE.FINISH_TIME); + } + + @Id + @Column(length = ID_LIMIT, unique = true, nullable = false) + public String id; + + @Column(length = APP_NAME_LIMIT, nullable = false) + public String name; + + @Column(length = USERNAME_LIMIT, nullable = false) + public String username; + + @Column(length = QUEUE_NAME_LIMIT, nullable = false) + public String queueName; + + @Column(nullable = false) + public long startTime; + + @Column(nullable = false) + public long finishTime; + + @Column(length = TRACKING_URL_LIMIT, nullable = false) + public String trackingUrl; + + 
@Column(length = JOBTYPE_LIMIT, nullable = false) + public String jobType; + + @Column(nullable = false) + public Severity severity; + + @Column(nullable = false) + public int score; + + @Column(nullable = false) + public int workflowDepth; + + @Column(length = SCHEDULER_LIMIT, nullable = true) + public String scheduler; + + @Column(length = JOB_NAME_LIMIT, nullable = false) + public String jobName; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String jobExecId; + + @Column(length = FLOW_EXEC_ID_LIMIT, nullable = false) + public String flowExecId; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String jobDefId; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String flowDefId; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String jobExecUrl; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String flowExecUrl; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String jobDefUrl; + + @Column(length = URL_LEN_LIMIT, nullable = false) + public String flowDefUrl; + + @Column(nullable = true) + public long resourceUsed; + + @Column(nullable = true) + public long resourceWasted; + + @Column(nullable = true) + public long totalDelay; + + @JsonManagedReference + @OneToMany(cascade = CascadeType.ALL, mappedBy = "yarnAppResult") + public List<AppHeuristicResult> yarnAppHeuristicResults; + + public static Finder<String, AppResult> find = new Finder<String, AppResult>(String.class, AppResult.class); +} diff --git a/app/views/emailcritical.scala.html b/app/views/emailcritical.scala.html deleted file mode 100644 index 38de4c72d..000000000 --- a/app/views/emailcritical.scala.html +++ /dev/null @@ -1,36 +0,0 @@ -@(result: model.JobResult) - - - - -
-

Dr. Elephant needs your help!

- -

- Your Hadoop job is endangering the elephants living in our cluster. -

-

- Please take a look here to figure out - what chemicals is causing the elephants to go nuts. -

-

- Here are the aspects of the job that we need to look at:
-

    - @for(heuristicResult <- result.heuristicResults) { - @if(heuristicResult.severity == com.linkedin.drelephant.analysis.Severity.CRITICAL) { -
  • @heuristicResult.analysisName
  • - } - } -
-

-

- [Dr. Elephant] - [Jobtracker] -
@result.job_id
@result.jobName -

-

- Thanks! -

-
- - \ No newline at end of file diff --git a/app/views/help.scala.html b/app/views/help.scala.html deleted file mode 100644 index 51dc13440..000000000 --- a/app/views/help.scala.html +++ /dev/null @@ -1,18 +0,0 @@ -@(title: String)(content: Html) - -@main("Dr. Elephant", "help") { - @tags.column(3) { - @tags.panel(){ Topics } { - @for(heuristic <- com.linkedin.drelephant.ElephantAnalyser.instance().heuristicNames) { - @heuristic
- } - @com.linkedin.drelephant.ElephantAnalyser.NO_DATA
- } - } - @tags.column(9) { - @tags.panel(){ @title } { - @if(content == null){Please select a topic} - @content - } - } -} diff --git a/app/views/help/helpNoData.scala.html b/app/views/help/helpNoData.scala.html new file mode 100644 index 000000000..bc65486f0 --- /dev/null +++ b/app/views/help/helpNoData.scala.html @@ -0,0 +1,20 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

+ This means that the JobTracker no longer provides information to Dr. Elephant for that job.
+ The JobTracker wipes job data fairly quickly, which can sometimes result in an empty set of data for certain jobs. +

\ No newline at end of file diff --git a/app/views/help/mapreduce/helpDistributedCacheLimit.scala.html b/app/views/help/mapreduce/helpDistributedCacheLimit.scala.html new file mode 100644 index 000000000..b2af0b457 --- /dev/null +++ b/app/views/help/mapreduce/helpDistributedCacheLimit.scala.html @@ -0,0 +1,27 @@ +@* +* Copyright 2017 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

Jobs which put large files (> 500 MB) in the distributed cache are flagged.

+

Files referenced by the following properties are considered (see the sketch after this list).

+
    +
  • + mapreduce.job.cache.files +
  • +
  • + mapreduce.job.cache.archives +
  • +
+

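As a rough sketch of where these properties come from, assuming the Hadoop 2 Job API (the class name, input path, and archive name below are hypothetical):

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class CacheSetupSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "cache-setup-sketch");
    // Each URI added here lands in mapreduce.job.cache.files;
    // a single file larger than 500 MB would trip this heuristic.
    job.addCacheFile(new URI("/user/example/lookup-data.dat")); // hypothetical path
    // Archives are tracked separately under mapreduce.job.cache.archives.
    job.addCacheArchive(new URI("/user/example/dictionaries.zip")); // hypothetical path
  }
}
```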
\ No newline at end of file diff --git a/app/views/help/mapreduce/helpException.scala.html b/app/views/help/mapreduce/helpException.scala.html new file mode 100644 index 000000000..2f4b3f759 --- /dev/null +++ b/app/views/help/mapreduce/helpException.scala.html @@ -0,0 +1,25 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

+ Dr. Elephant now provides exception info for failed jobs.
+ The stacktrace shown is the exception from the failed task that caused the job to fail.
+ Some failed jobs do not have a valid stacktrace reported by Hadoop; in that case, Dr. Elephant cannot provide the stacktrace.
+ If Dr. Elephant cannot provide the exception, try digging through the job history page yourself.
+ For example, some real exceptions appear at the end of the Application Master log.
+ Some real exceptions appear in the task log.
+ We hope to provide you more insights into these exceptions in the future. +

\ No newline at end of file diff --git a/app/views/help/mapreduce/helpGC.scala.html b/app/views/help/mapreduce/helpGC.scala.html new file mode 100644 index 000000000..2b87c34d6 --- /dev/null +++ b/app/views/help/mapreduce/helpGC.scala.html @@ -0,0 +1,58 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

+ This analysis gauges your job's GC efficiency. It shows the ratio between the task GC time and task CPU time. +

+
Example
+

+

+

+

Suggestions

+

+ If your task spends a lot of time on GC, your job generally needs some code optimization. + There is no one-size-fits-all answer. We are still improving this heuristic. + We set the bar quite loose so that this heuristic bothers you as little as possible. If you still get yellow/red on this one, please take it seriously. +

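As a starting point for investigating GC-heavy tasks, a minimal sketch of surfacing GC details in the task logs, assuming standard HotSpot flags (the heap size shown is an arbitrary example):

```java
import org.apache.hadoop.conf.Configuration;

public class GcLoggingSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Emit GC activity into the task logs so you can see where GC time goes.
    conf.set("mapreduce.map.java.opts", "-Xmx1536m -verbose:gc -XX:+PrintGCDetails");
    conf.set("mapreduce.reduce.java.opts", "-Xmx1536m -verbose:gc -XX:+PrintGCDetails");
  }
}
```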
diff --git a/app/views/help/mapreduce/helpJobQueueLimit.scala.html b/app/views/help/mapreduce/helpJobQueueLimit.scala.html new file mode 100644 index 000000000..7339967d0 --- /dev/null +++ b/app/views/help/mapreduce/helpJobQueueLimit.scala.html @@ -0,0 +1,71 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

The task execution limit on the Default queue is 15 minutes (900 seconds). You really, really want to be clear of +that limit or your job may fail as data grows or if you get assigned some slow nodes. So while running a job on the +default queue, make sure each task runs for between 5 and 15 minutes.

+

This analysis shows an indication of how long your tasks have taken to run on the Default Queue.

+
Example
+

+

+

+

Suggestions

+

+ If a job has task times approaching 15 minutes, then you must tune down your task runtimes (usually by increasing the + number of tasks) or move the job to the marathon queue. The marathon queue is built for long-running jobs so that + faster jobs can finish in a reasonable time without having to worry about resources being available. +

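A minimal sketch of moving a job off the default queue, assuming the marathon queue described above (queue names are cluster-specific):

```java
import org.apache.hadoop.conf.Configuration;

public class QueueSelectionSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Submit long-running jobs to the marathon queue instead of default.
    conf.set("mapreduce.job.queuename", "marathon");
  }
}
```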
\ No newline at end of file diff --git a/app/views/helpMapperDataSkew.scala.html b/app/views/help/mapreduce/helpMapperDataSkew.scala.html similarity index 69% rename from app/views/helpMapperDataSkew.scala.html rename to app/views/help/mapreduce/helpMapperDataSkew.scala.html index fbf7733b6..50aa9312d 100644 --- a/app/views/helpMapperDataSkew.scala.html +++ b/app/views/help/mapreduce/helpMapperDataSkew.scala.html @@ -1,3 +1,19 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This analysis shows whether there is a data-skew for the data entering mapper tasks.

diff --git a/app/views/help/mapreduce/helpMapperMemory.scala.html b/app/views/help/mapreduce/helpMapperMemory.scala.html new file mode 100644 index 000000000..6c31c261f --- /dev/null +++ b/app/views/help/mapreduce/helpMapperMemory.scala.html @@ -0,0 +1,17 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@helpMemory() \ No newline at end of file diff --git a/app/views/helpMapperSpeed.scala.html b/app/views/help/mapreduce/helpMapperSpeed.scala.html similarity index 54% rename from app/views/helpMapperSpeed.scala.html rename to app/views/help/mapreduce/helpMapperSpeed.scala.html index 1e618c356..e44edfab5 100644 --- a/app/views/helpMapperSpeed.scala.html +++ b/app/views/help/mapreduce/helpMapperSpeed.scala.html @@ -1,9 +1,26 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This analysis shows the effectiveness of your mapper code.
This should allow you to determine if your mapper is CPU-bound or if your mapper is outputting huge amounts of data.

- This result of the analysis shows problems with mappers with significant slow speeds for the amount of data it needs to read. + This result of the analysis highlights mappers that are significantly slow for the amount of data they need + to read.

Example

@@ -37,5 +54,11 @@

Mapper Speed

Suggestions

If your mappers are CPU bound (Average task speed ~KB/s), then your mappers are performing significant CPU work, - and you should consider optimizing your mapper code or check for inefficiencies in code. -

\ No newline at end of file + and you should consider optimizing your mapper code or check for inefficiencies in code. Alternatively, in rare + cases, it may help to reduce the size of input that each mapper can process.
+
+ The maximum map split size is controlled by the FileInputFormat.SPLIT_MAXSIZE + ("mapreduce.input.fileinputformat.split.maxsize") parameter. By + decreasing this value below dfs.block.size, you can reduce the input size for each mapper, thereby increasing the + number of mappers in your job.
+

diff --git a/app/views/help/mapreduce/helpMapperSpill.scala.html b/app/views/help/mapreduce/helpMapperSpill.scala.html new file mode 100644 index 000000000..863eb3042 --- /dev/null +++ b/app/views/help/mapreduce/helpMapperSpill.scala.html @@ -0,0 +1,62 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

+ This heuristic gauges your mapper performance from a disk I/O perspective. Mapper spill ratio (spilled records/output + records) is a critical indicator of your mapper performance: if the ratio is close to 2, it means each record is + spilled to disk twice (once when the in-memory sort buffer is almost full, once when merging spilled splits). This + usually happens when your mappers produce a large amount of output. If the ratio is higher than 2, we suggest you try + our recommendation below. Wasting a lot of disk I/O on sorting output records can seriously affect your mapper + speed. To make it run faster, try our tentative recommendation. We newly enabled this heuristic and want you to test + it!
+

+

+ +

+
Example
+

+

+

+

Suggestions

+

+This heuristic is less straightforward than others, and it requires deeper Hadoop knowledge. We are still working on finalizing the recommendation. Feedback is welcome! You could try the following (a configuration sketch follows this list): +

    +
  1. Increase the size of in-memory sort buffer (mapreduce.task.io.sort.mb), default 100M
  2. +
  3. Increase the buffer spill percentage (mapreduce.map.sort.spill.percent, when it is reached a background thread will start spill buffer to disk), default value is 0.8.
  4. +
  5. Use combiner to lower the map output size.
  6. +
  7. Compress mapper output (set mapreduce.map.output.compress and mapreduce.map.output.compress.codec)
  8. +
+

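A minimal configuration sketch covering the suggestions above (the buffer size, spill percentage, and codec choice are examples, not recommendations for every job):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.SnappyCodec;

public class SpillTuningSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setInt("mapreduce.task.io.sort.mb", 200);           // larger in-memory sort buffer (default 100)
    conf.setFloat("mapreduce.map.sort.spill.percent", 0.9f); // spill later (default 0.8)
    conf.setBoolean("mapreduce.map.output.compress", true);  // compress mapper output
    conf.setClass("mapreduce.map.output.compress.codec",     // codec choice is an example
        SnappyCodec.class, CompressionCodec.class);
  }
}
```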
\ No newline at end of file diff --git a/app/views/help/mapreduce/helpMapperTime.scala.html b/app/views/help/mapreduce/helpMapperTime.scala.html new file mode 100644 index 000000000..9e79445ba --- /dev/null +++ b/app/views/help/mapreduce/helpMapperTime.scala.html @@ -0,0 +1,124 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

+ This analysis shows how well the number of mappers is adjusted.
+ This should allow you to better tweak the number of mappers for your job.
+ There are two possible situations that need some tuning. +

+

Mapper time too short

+

+ This usually happens when the Hadoop job has: +

    +
  • A large number of mappers
  • +
  • Short mapper avg runtime
  • +
  • Small file size
  • +
+

+
Example
+

+

+

+

Suggestions

+

+ You should tune the mapper split size to reduce the number of mappers and let each mapper process more data.
+ The parameters for changing split size are:
+

    +
  • mapreduce.input.fileinputformat.split.minsize/maxsize
  • +
  • pig.maxCombinedSplitSize (Pig Only)
  • +
+ Examples on how to set them: +
    +
  • HadoopJava: conf.setLong("mapreduce.input.fileinputformat.split.minsize", XXXXX)
  • +
  • Pig: set mapreduce.input.fileinputformat.split.minsize XXXXX
  • +
  • Hive: set mapreduce.input.fileinputformat.split.minsize=XXXXX
  • +
+ + The split size is controlled by formula max(minSplitSize, min(maxSplitSize, blockSize)). By default, + blockSize=512MB and minSplit < blockSize < maxSplit.
+ You should always refer to this formula.
+ In the case above, try increasing min split size and let each mapper process larger data.
+ [Note] By default HadoopJava will not combine small files, so each mapper cannot process more than + one file, and changing split size won't help. If that is your case, you should either try CombineFileInputFormat or + use Pig/Hive. +
+ See Hadoop Tuning Tips for further information.
+

+

Large files/Unsplittable files

+

+ This usually happens when the Hadoop job has: +

    +
  • A small number of mappers
  • +
  • Long mapper avg runtime
  • +
  • Large file size (a few GB's)
  • +
+

+
Example
+

+

+

+

Suggestions

+

+ The split size is too large. You should tune the mapper split size to increase the number of mappers and let each mapper + process less data.
+
+ The input split size is controlled by formula max(minSplitSize, min(maxSplitSize, blockSize)). See the + previous section for further details.
+ In the case above, since the mapper input size >> block size and you want to increase the number of mappers, you should decrease the min split size to close to the block size (512MB).
+ See Hadoop Tuning Tips for further information. +

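For the small-files case noted above, a minimal sketch of combining small files in a HadoopJava job using CombineTextInputFormat (the split size is an example value):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat;

public class CombineSmallFilesSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "combine-small-files-sketch");
    // Pack many small files into fewer splits so each mapper processes more data.
    job.setInputFormatClass(CombineTextInputFormat.class);
    CombineTextInputFormat.setMaxInputSplitSize(job, 512L * 1024 * 1024); // ~one block, example value
  }
}
```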
diff --git a/app/views/help/mapreduce/helpMemory.scala.html b/app/views/help/mapreduce/helpMemory.scala.html new file mode 100644 index 000000000..611bb7c2b --- /dev/null +++ b/app/views/help/mapreduce/helpMemory.scala.html @@ -0,0 +1,73 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

+ This analysis shows the task memory utilization.
+ We check the ratio between your task's consumed memory AND the requested container memory.
+ The consumed memory is the average of each task's [max consumed physical memory snapshot].
+ The requested container memory is the "mapreduce.map/reduce.memory.mb" config for this job, which is the max physical memory the job can request.
+ If this heuristic is above MODERATE, it means your used memory is relatively low compared with the requested max memory. +

+ +
Example
+

+

+

+

Suggestions

+

+ +
+ The problem indicates you requested large task memory (set mapreduce.map(or reduce).memory.mb > 2048), but the average physical memory used by the tasks is low.
+ You should try to decrease mapreduce.map(or reduce).memory.mb.
+ If you get an OutOfMemory error, think about why it happens before increasing the memory again.
+ For example, see if there is any mapper/reducer input data skew (some tasks process larger input than others).
+ This is a new heuristic. We are still working on it. Any feedback is welcome!
+ + Examples on how to set the parameter: +

    +
  • HadoopJava: conf.setLong("mapreduce.map(or reduce).memory.mb",4096)
  • +
  • Pig: set mapreduce.map(or reduce).memory.mb 4096
  • +
  • Hive: set mapreduce.map(or reduce).memory.mb=4096
  • +
+ +
+ See Hadoop Tuning Tips for further information.
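A minimal HadoopJava sketch of right-sizing the container and keeping the JVM heap below it (the 2048 MB container and the ~80% heap rule of thumb are assumptions for illustration):

```java
import org.apache.hadoop.conf.Configuration;

public class TaskMemorySketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Request smaller containers when peak usage is well below the current ask.
    conf.setInt("mapreduce.map.memory.mb", 2048);
    conf.setInt("mapreduce.reduce.memory.mb", 2048);
    // Keep the heap below the container size to leave headroom for non-heap memory.
    conf.set("mapreduce.map.java.opts", "-Xmx1638m");
    conf.set("mapreduce.reduce.java.opts", "-Xmx1638m");
  }
}
```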
diff --git a/app/views/helpReducerDataSkew.scala.html b/app/views/help/mapreduce/helpReducerDataSkew.scala.html similarity index 68% rename from app/views/helpReducerDataSkew.scala.html rename to app/views/help/mapreduce/helpReducerDataSkew.scala.html index 61831cacb..d0213d9d6 100644 --- a/app/views/helpReducerDataSkew.scala.html +++ b/app/views/help/mapreduce/helpReducerDataSkew.scala.html @@ -1,3 +1,19 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This analysis shows whether there is a data-skew for the data entering reducer tasks.

@@ -34,5 +50,4 @@

Suggestions

This is often caused by skew in the keyspace (aggregation key for group by, join key for joins).
If using Pig, try a skew join.
Otherwise, consider what you're doing in the job and if there's a better way to do it.
- THEN talk to hadoop dev

\ No newline at end of file diff --git a/app/views/help/mapreduce/helpReducerMemory.scala.html b/app/views/help/mapreduce/helpReducerMemory.scala.html new file mode 100644 index 000000000..6c31c261f --- /dev/null +++ b/app/views/help/mapreduce/helpReducerMemory.scala.html @@ -0,0 +1,17 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@helpMemory() \ No newline at end of file diff --git a/app/views/helpReducerTime.scala.html b/app/views/help/mapreduce/helpReducerTime.scala.html similarity index 64% rename from app/views/helpReducerTime.scala.html rename to app/views/help/mapreduce/helpReducerTime.scala.html index 08daa3658..46425c2c7 100644 --- a/app/views/helpReducerTime.scala.html +++ b/app/views/help/mapreduce/helpReducerTime.scala.html @@ -1,3 +1,19 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This analysis shows the efficiency of your reducers.
This should allow you to better adjust the number of reducers for your job.
@@ -7,8 +23,8 @@

Too many reducers

This happens when the Hadoop job has:

    -
  • A large number of reducers
  • -
  • Short reducer runtime
  • +
  • A large number of reducers
  • +
  • Short reducer runtime

Example
@@ -36,8 +52,8 @@

Too few reducers

This happens when the Hadoop job has:

    -
  • A small number of reducers
  • -
  • Long reducer runtime
  • +
  • A small number of reducers
  • +
  • Long reducer runtime

Example
@@ -67,6 +83,9 @@

Suggestions


For Hadoop/Java jobs: Use "jobConf.setNumReduceTasks(NUMBER_OF_REDUCERS);"
For Apache-Pig jobs: Use PARALLEL [num] clause on the operator which caused this job (Though this will probably be hard for people to understand without Lipstick)
- For Apache-Hive jobs: Use "set mapred.reduce.tasks=NUMBER_OF_REDUCERS"
- (change NUMBER_OF_TASKS to an appropriate number of tasks) + For Apache-Hive jobs: Use "set mapreduce.job.reduces=NUMBER_OF_REDUCERS"
+ For Azkaban flows, add jvm.args=-Dmapreduce.job.reduces=NUMBER_OF_REDUCERS to your job properties
+
+ Generally, Dr. Elephant (and the Hadoop team) advises the ideal task time to be 5-10 minutes.
+ See Hadoop Tuning Tips for further information.

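A minimal HadoopJava sketch of the suggestion above; the reducer count of 100 is a placeholder for NUMBER_OF_REDUCERS and should be derived from your data volume and the 5-10 minute ideal task time:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class ReducerCountSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "reducer-count-sketch");
    // Pick the count so each reducer runs roughly 5-10 minutes.
    job.setNumReduceTasks(100); // placeholder value
  }
}
```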
\ No newline at end of file diff --git a/app/views/help/mapreduce/helpShuffleSort.scala.html b/app/views/help/mapreduce/helpShuffleSort.scala.html new file mode 100644 index 000000000..e4d679b78 --- /dev/null +++ b/app/views/help/mapreduce/helpShuffleSort.scala.html @@ -0,0 +1,64 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +

+ This analysis shows how much time the reducer spends in shuffle and sort steps versus in the reducer code.
+ This should allow you to understand the efficiency of your reducer. +

+

+ This result of the analysis shows problems with reducers where shuffle and/or sort times are significantly larger than code runtime. +

+
Example
+

+

+

+

Suggestions

+

+ If your shuffle time is high but sort is low, you likely need to tune slowstart by setting "mapreduce.job.reduce.slowstart.completedmaps" to 0.90+ up to 1.0
+ Warning: Setting this parameter close to 1.0 will decrease shuffle time, but may increase this job's total runtime
+
+ If you want to set it to 0.95 on Nertz:
+ For Hadoop/Java jobs: Use "jobConf.setFloat("mapreduce.job.reduce.slowstart.completedmaps", 0.95f);"
+ For Apache-Pig jobs: Use "set mapreduce.job.reduce.slowstart.completedmaps 0.95"
+ For Apache-Hive jobs: Use "set mapreduce.job.reduce.slowstart.completedmaps=0.95"
+ For Azkaban flows, add jvm.args=-Dmapreduce.job.reduce.slowstart.completedmaps=0.95 to your job properties(will affect all MapReduce jobs under this azkaban job)
+ See Hadoop Tuning Tips for further information. +

\ No newline at end of file diff --git a/app/views/help/metrics/helpRuntime.scala.html b/app/views/help/metrics/helpRuntime.scala.html new file mode 100644 index 000000000..5bafe11a1 --- /dev/null +++ b/app/views/help/metrics/helpRuntime.scala.html @@ -0,0 +1,32 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@import com.linkedin.drelephant.analysis.Metrics; + +

+ The runtime metric shows the total runtime of your job.

+

+ +

Calculation

+
+ The runtime of the job is the difference between the time when the job was submitted to the resource manager and when the job finished. +
+ +

Example

+Let the submit time of a job be 1461837302868 ms
+Let the finish time of the job be 1461840952182 ms
+The runtime of the job will be 1461840952182 - 1461837302868 = 3649314 ms or 1.01 hours + diff --git a/app/views/help/metrics/helpUsedResources.scala.html b/app/views/help/metrics/helpUsedResources.scala.html new file mode 100644 index 000000000..06c374d76 --- /dev/null +++ b/app/views/help/metrics/helpUsedResources.scala.html @@ -0,0 +1,38 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@import com.linkedin.drelephant.analysis.Metrics; + +

+ The Used Resources metric shows the resources used by your job in GB Hours.
+

+ +

Calculation

+
+ We define resource usage of a task as the product of container size of the task and the runtime of the task.
+ The resource usage of a job can thus be defined as the sum of resource usage of all the mapper tasks and all the reducer tasks. +
+ +

Example

+Consider a job with:
+4 mappers with runtime {12, 15, 20, 30} mins.
+4 reducers with runtime {10 , 12, 15, 18} mins.
+Container size of 4 GB
+Then,
+Resource used by all mappers: 4 * (( 12 + 15 + 20 + 30 ) / 60 ) GB Hours = 5.133 GB Hours
+Resource used by all reducers: 4 * (( 10 + 12 + 15 + 18 ) / 60 ) GB Hours = 3.666 GB Hours
+Total resource used by the job = 5.133 + 3.6666 = 8.799 GB Hours
+ diff --git a/app/views/help/metrics/helpWaittime.scala.html b/app/views/help/metrics/helpWaittime.scala.html new file mode 100644 index 000000000..185c245ee --- /dev/null +++ b/app/views/help/metrics/helpWaittime.scala.html @@ -0,0 +1,57 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@import com.linkedin.drelephant.analysis.Metrics; + +

+ The waittime is the total time spent by the job in the waiting state. +

+ +

Calculation

+For each task, let us define the following:

+ideal_start_time := The ideal time when all the tasks should have started
+finish_time := The time when the task finished
+task_runtime := The runtime of the task
+ +
Map tasks
+For map tasks, we have

+ideal_start_time := The job submission time

+We will find the mapper task with the longest runtime ( task_runtime_max) and the task which finished last ( finish_time_last )
+The total wait time of the job due to mapper tasks would be:
+
+mapper_wait_time = finish_time_last - ( ideal_start_time + task_runtime_max)
+ +
Reduce tasks
+For reducer tasks, we have

+ideal_start_time := This is computed by looking at the reducer slow start percentage (mapreduce.job.reduce.slowstart.completedmaps) and finding the finish time +of the map task after which first reducer should have started
+We will find the reducer task with the longest runtime ( task_runtime_max) and the task which finished last ( finish_time_last )

+The total wait time of the job due to reducer tasks would be:
+reducer_wait_time = finish_time_last - ( ideal_start_time + task_runtime_max)
+ + +

Explanation

+

+ If the job didn't have to wait in the cluster, then the total execution time for all the mappers would have been upper bounded by the + execution time of the longest-running mapper. Hence any additional time taken by all the mappers to complete is the mapper wait time that we are computing here.
+ + Similar logic goes for the computation of reducer wait time.

+ Total wait time of the job = mapper_wait_time + reducer_wait_time
+

+ +

We are still working on this metric, so any feedback is welcome!

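A minimal sketch of the wait time arithmetic defined above (the timestamps are hypothetical epoch milliseconds):

```java
public class WaitTimeSketch {
  // wait_time = finish_time_last - (ideal_start_time + task_runtime_max), per the formulas above
  static long waitTime(long idealStartTime, long taskRuntimeMax, long finishTimeLast) {
    return finishTimeLast - (idealStartTime + taskRuntimeMax);
  }

  public static void main(String[] args) {
    // Job submitted at t=0; the longest mapper ran 10 min,
    // but the last mapper finished at 25 min => 15 min of mapper wait time.
    long mapperWaitMs = waitTime(0L, 10 * 60 * 1000L, 25 * 60 * 1000L);
    System.out.println("mapper_wait_time = " + mapperWaitMs + " ms"); // 900000 ms
  }
}
```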
+ + diff --git a/app/views/help/metrics/helpWastedResources.scala.html b/app/views/help/metrics/helpWastedResources.scala.html new file mode 100644 index 000000000..73a6a0fce --- /dev/null +++ b/app/views/help/metrics/helpWastedResources.scala.html @@ -0,0 +1,65 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@import com.linkedin.drelephant.analysis.Metrics; + +

+ This shows the amount of resources wasted by your job in GB Hours. +

+

Calculation

+

+ To calculate the resources wasted, we calculate the following:
+

    +
  • The minimum memory wasted by the tasks (Map and Reduce)
  • +
  • The runtime of the tasks (Map and Reduce)
  • +
+
+The minimum memory wasted by a task is equal to the difference between the container size and the maximum task memory (peak memory) among all tasks.
+
+The resource wasted by a task is then the minimum memory wasted by the task multiplied by the duration of the task.
+
+The total resource wasted by the job will then be equal to the sum of the wasted resources of all the tasks. +
+ +Let us define the following for each task:
+
+ +peak_memory_used := The upper bound on the memory used by the task.
+runtime := The run time of the task.
+ +
+ +The peak_memory_used for any task is calculated by finding out the maximum of physical memory(max_physical_memory) used by all the tasks +and the virtual memory(virtual_memory) used by the task.
+ +Since peak_memory_used for each task is upper bounded by max_physical_memory, we can say for each task:
+
+ +peak_memory_used = Max(max_physical_memory, virtual_memory/2.1)
+Where 2.1 is the cluster memory factor. +
+
+The minimum memory wasted by each task can then be calculated as:
+
+wasted_memory = Container_size - peak_memory_used
+
+The minimum resource wasted by each task can then be calculated as:
+
+wasted_resource = wasted_memory * runtime
+
+

+ +

We are still working on this metric. Any feedback is welcome!

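A minimal sketch of the wasted-resource arithmetic defined above (container size, task memory snapshot, and runtime are hypothetical):

```java
public class WastedResourcesSketch {
  // peak_memory_used = Max(max_physical_memory, virtual_memory / 2.1), per the formula above
  static double peakMemoryUsedMb(double maxPhysicalMb, double virtualMb) {
    return Math.max(maxPhysicalMb, virtualMb / 2.1);
  }

  public static void main(String[] args) {
    double containerMb = 4096;                    // hypothetical container size
    double peakMb = peakMemoryUsedMb(1500, 2100); // hypothetical task memory snapshot
    double runtimeHours = 0.5;                    // hypothetical task runtime
    double wastedMemoryMb = containerMb - peakMb; // wasted_memory
    double wastedGbHours = (wastedMemoryMb / 1024) * runtimeHours; // wasted_resource
    System.out.println("wasted_resource = " + wastedGbHours + " GB Hours");
  }
}
```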
diff --git a/app/views/help/spark/helpBestProperties.scala.html b/app/views/help/spark/helpBestProperties.scala.html new file mode 100644 index 000000000..02253f5ad --- /dev/null +++ b/app/views/help/spark/helpBestProperties.scala.html @@ -0,0 +1,53 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

There are some Spark property settings that are known to be good choices in most scenarios. Spark users are + highly encouraged to adopt these best practices whenever possible; property settings that deviate from them will + trigger warnings ranging from moderate to severe. Please also refer to + Spark Configuration Doc + for property tuning beyond this page's suggestions.

+

Suggestions

+ +

spark.serializer

+

+ Class to use for serializing objects that will be sent over the network or need to be cached in serialized form. + The default of Java serialization works with any Serializable Java object but is quite slow, so we recommend using + org.apache.spark.serializer.KryoSerializer and configuring Kryo serialization whenever possible. + This also becomes a default choice in Spark 1.3. +

+ +

spark.driver.memory

+

+ spark.driver.memory has a default value of 512m. Allocating more memory than the default + is generally acceptable, but users should realize that requesting too much driver memory from the cluster can lead + to long queueing times. Generally we recommend keeping the memory allocation <=8g. +

+ +

spark.shuffle.manager

+

+ Implementation to use for shuffling data. Available choices are shuffle or sort. + Sort-based shuffle is more memory-efficient and is the default option starting in 1.2. We'd recommend using + sort in almost all scenario. +

+ + +

spark.executor.cores

+

+ spark.executor.cores has a default value of 1. Our Hadoop 2 clusters currently turn + off CPU scheduling, even if you specify a large number for executor-cores, your Spark executor is not guaranteed to + get the specified number of virtual cores (vCores). This will cause the executor to run more concurrent tasks than + vCores available to it, causing frequent context switching and eventually slowing down your application + with high failure rate. We suggest setting executor-cores <= 2. +

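A minimal SparkConf sketch consolidating the suggestions on this page (the driver memory value is just one example within the <=8g guideline):

```java
import org.apache.spark.SparkConf;

public class SparkBestPropertiesSketch {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf()
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.driver.memory", "4g")   // keep well under the 8g guideline
        .set("spark.shuffle.manager", "sort")
        .set("spark.executor.cores", "2");  // <= 2 per the vCores caveat above
  }
}
```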
diff --git a/app/views/help/spark/helpConfigurationHeuristic.scala.html b/app/views/help/spark/helpConfigurationHeuristic.scala.html new file mode 100644 index 000000000..abadd6167 --- /dev/null +++ b/app/views/help/spark/helpConfigurationHeuristic.scala.html @@ -0,0 +1,18 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

The results from this heuristic primarily inform you about key app +configuration settings, including driver memory, executor cores, +executor instances, executor memory, and the serializer.

diff --git a/app/views/help/spark/helpEventLogLimit.scala.html b/app/views/help/spark/helpEventLogLimit.scala.html new file mode 100644 index 000000000..6c3c05ada --- /dev/null +++ b/app/views/help/spark/helpEventLogLimit.scala.html @@ -0,0 +1,20 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

+ Spark's event log parser currently cannot handle very large event log files. It would take too long for Dr. Elephant to + analyze them, which might endanger the entire server. Therefore, we currently set a limit (100MB) for event log files, + and bypass the log-fetching process if the log size exceeds the limit. +

\ No newline at end of file diff --git a/app/views/help/spark/helpExecutorLoad.scala.html b/app/views/help/spark/helpExecutorLoad.scala.html new file mode 100644 index 000000000..c0365ede5 --- /dev/null +++ b/app/views/help/spark/helpExecutorLoad.scala.html @@ -0,0 +1,73 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

+ Unlike Map/Reduce jobs, a Spark application allocates all of its resources at once and never releases any during + the entire runtime until everything is finished. It is critical to optimize the load balance of + executors to avoid excessive usage of the cluster. +

+
Example
+
+

Spark Executor Load Balance

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Severity: Critical +
Average peak storage memory1.09 GB (0 B~2.04 GB)
Average runtime12min 55sec (0 sec~31min 50sec)
Average input size125.95 GB (0 B~304.21 GB)
Average output size0 B (0 B~0 B)
Average task number399 (0~962)
+
+ +

Suggestions

+
1. If there are unused executors (0 tasks, 0 seconds to run and etc.)
+

+ Even though a Spark application asks for all of its resources at once, YARN will only grant resources gradually. Users should try to allocate less in this case since those resources won't be used anyway. An + application will kick off even if only part of the resources are allocated. The allocation speed depends largely on + how busy the cluster is. However, asking for less memory per executor and fewer executors in total will + often help shorten the entire allocation time. +

+ +
2. Some executors are getting much more tasks than others.
+

+ Each RDD contains a certain number of partitions. While computing those partitions, one partition will be assigned to + only one executor. This means if a to-be-computed RDD has 10 partitions but we have 100 executors in total, only 10 + of the 100 executors will be used. Many RDDs with few partitions can lead to unbalanced executor utilization. A way to + improve this is to load RDDs from HDFS with more partitions or to use the RDD#repartition method + (see the sketch at the end of this page). A good choice of partition number is equal to or slightly less than k*[executor num] where + k is an integer around 2~5. +

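A minimal Java sketch of the repartitioning suggestion above (the input path and the assumption of 100 executors are hypothetical):

```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class RepartitionSketch {
  public static void main(String[] args) {
    JavaSparkContext sc = new JavaSparkContext(new SparkConf().setAppName("repartition-sketch"));
    JavaRDD<String> lines = sc.textFile("/data/input"); // hypothetical path
    // Target partitions ~= k * [executor num], with k around 2~5; assume 100 executors.
    JavaRDD<String> balanced = lines.repartition(3 * 100);
    System.out.println(balanced.partitions().size());
  }
}
```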
diff --git a/app/views/help/spark/helpExecutorsHeuristic.scala.html b/app/views/help/spark/helpExecutorsHeuristic.scala.html new file mode 100644 index 000000000..70084339c --- /dev/null +++ b/app/views/help/spark/helpExecutorsHeuristic.scala.html @@ -0,0 +1,36 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This heuristic concerns the distribution (min, 25p, median, 75p, +max) of key executor metrics including input bytes, shuffle read +bytes, shuffle write bytes, storage memory used, and task time. The +max-to-median ratio determines the severity of any particular metric.

+ +

A Spark application gets its resources from YARN all at once,
+and doesn't release them until the application completes. Thus, it's
+important to balance load across the executors to avoid wasting
+resources.

+ +

To achieve better load balancing:

+ +
    +
  • use an appropriate number of partitions (some small multiple of + the # of executors) so that there are enough tasks handling those + partitions to keep the executors busy
  • +
  • try to avoid key skew; you should know which partitioner you are
+ using and what the distribution of your keys is
  • +
  • consider enabling spark.speculation, so that straggler tasks can
+ be re-launched (see the sketch after this list)
  • +
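+
+ A minimal sketch of the last suggestion, enabling speculation when the context is built (the app name is
+ hypothetical):
+
+   import org.apache.spark.{SparkConf, SparkContext}
+
+   // spark.speculation re-launches slow-running (straggler) tasks on other executors.
+   val conf = new SparkConf()
+     .setAppName("MyApp") // hypothetical
+     .set("spark.speculation", "true")
+   val sc = new SparkContext(conf)
+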
diff --git a/app/views/help/spark/helpJobRuntime.scala.html b/app/views/help/spark/helpJobRuntime.scala.html new file mode 100644 index 000000000..ba9b42746 --- /dev/null +++ b/app/views/help/spark/helpJobRuntime.scala.html @@ -0,0 +1,34 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

+One Spark application can be broken into multiple jobs, and each job can be broken into multiple stages. +

+

Suggestions

+ +
1. High failure rate
+

+ A high failure rate can have multiple causes: using more than 2 cores per executor on YARN, an unstable
+ implementation, an unbalanced workload, or insufficient allocated memory, among others. Users are strongly
+ encouraged to look into the detailed error logs and find the exact cause. +

+ +
2. Slow job runtime
+

+ Slow job runtime is typically due to an unbalanced workload; repartition your RDDs into an appropriate number of
+ partitions (equal to or slightly less than k*[executor num], where k is an integer between 2 and 5).
+ However, if the slow runtime appears across all executors, this might suggest that the number of executors allocated
+ is not large enough. +
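+
+ If the whole application is uniformly slow, one hedged option is to request more executors; a sketch using the
+ YARN-side settings (the numbers are illustrative, not recommendations):
+
+   import org.apache.spark.SparkConf
+
+   // Raise the executor count while keeping cores per executor modest on YARN.
+   val conf = new SparkConf()
+     .set("spark.executor.instances", "100") // illustrative: was, say, 50
+     .set("spark.executor.cores", "2")
+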

diff --git a/app/views/help/spark/helpJobsHeuristic.scala.html b/app/views/help/spark/helpJobsHeuristic.scala.html new file mode 100644 index 000000000..e72005e00 --- /dev/null +++ b/app/views/help/spark/helpJobsHeuristic.scala.html @@ -0,0 +1,20 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This heuristic reports job failures and high task failure rates for +each job.

+ +

Job/task failures can occur for a number of reasons, so it is +recommended to look at the YARN application error logs.

diff --git a/app/views/help/spark/helpMemoryLimit.scala.html b/app/views/help/spark/helpMemoryLimit.scala.html new file mode 100644 index 000000000..7061e3e05 --- /dev/null +++ b/app/views/help/spark/helpMemoryLimit.scala.html @@ -0,0 +1,121 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

Current Spark applications lack elasticity when allocating resources. Unlike MapReduce jobs, which allocate
+ resources for a single map-reduce pass and release them gradually during runtime, Spark allocates all the
+ resources needed for the entire application and does not release unused ones during its life cycle. Too much memory
+ allocation is dangerous for overall cluster health. As a result, we set limits on both the total memory
+ allowed and the memory utilization ratio for Spark applications. +

+ +
Example
+ +
+

Spark Memory Limit

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Severity: Severe +
Total executor memory allocated1.95 TB
Total driver memory allocated3.2 GB
Total memory allocated for storage743.01 GB
Total memory used at peak287.63 GB
Memory utilization rate0.387
+
+ +

Suggestions

+
1. If memory utilization rate is too low (< 0.6)
+

+ memory utilization rate = [Peak memory used for storage] / [Total memory allocated for storage] +
+ A low memory utilization rate typically suggests that an application does not need the total amount of memory it
+ currently requests. Users could tune down the --executor-memory/spark.executor.memory settings
+ in proportion to the expected utilization rate. +
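+
+ Worked through with the example above, and assuming storage memory scales linearly with executor memory: hitting
+ the 0.6 target needs only about 287.63 GB / 0.6 ≈ 479 GB allocated for storage, roughly 65% of the current
+ 743.01 GB. A sketch of scaling the setting down accordingly (the original 4g per executor is a hypothetical
+ starting point):
+
+   import org.apache.spark.SparkConf
+
+   // ~65% of a hypothetical 4g per executor is about 2600m.
+   val conf = new SparkConf().set("spark.executor.memory", "2600m")
+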

+
+ +
2. If total memory allocated is too large
+

+ There are different levels of RDD persistence (quoting the Spark Programming Guide):
+

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Storage Level Meaning
MEMORY_ONLY Store RDD as deserialized Java objects in the JVM. If the RDD does not fit in memory, some partitions will + not be cached and will be recomputed on the fly each time they're needed. This is the default level.
MEMORY_AND_DISK Store RDD as deserialized Java objects in the JVM. If the RDD does not fit in memory, store the + partitions that don't fit on disk, and read them from there when they're needed.
MEMORY_ONLY_SER Store RDD as serialized Java objects (one byte array per partition). + This is generally more space-efficient than deserialized objects, especially when using a + fast serializer, but more CPU-intensive to read. +
MEMORY_AND_DISK_SER Similar to MEMORY_ONLY_SER, but spill partitions that don't fit in memory to disk instead of + recomputing them on the fly each time they're needed.
DISK_ONLY Store the RDD partitions only on disk.
MEMORY_ONLY_2, MEMORY_AND_DISK_2, etc. Same as the levels above, but replicate each partition on two cluster nodes.
OFF_HEAP (experimental, not available in LinkedIn) Store RDD in serialized format in Tachyon. + Compared to MEMORY_ONLY_SER, OFF_HEAP reduces garbage collection overhead and allows executors + to be smaller and to share a pool of memory, making it attractive in environments with + large heaps or multiple concurrent applications. Furthermore, as the RDDs reside in Tachyon, + the crash of an executor does not lead to losing the in-memory cache. In this mode, the memory + in Tachyon is discardable. Thus, Tachyon does not attempt to reconstruct a block that it evicts + from memory. +
+

+Caching RDDs in memory is not the only benefit of the Spark framework. In fact, even caching RDDs on disk and accessing them there is still
+much faster than accessing them through the MapReduce framework: Spark's disk persistence uses the local disk instead of HDFS, which is
+quite efficient. Users are encouraged to switch from MEMORY_ONLY to MEMORY_ONLY_SER, or even to DISK_ONLY, to reduce the total memory needed. +
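+
+ A minimal sketch of switching the persistence level (the RDD name is hypothetical; pick one level per RDD):
+
+   import org.apache.spark.storage.StorageLevel
+
+   // Serialized in-memory storage is more space-efficient than the default
+   // MEMORY_ONLY, at some extra CPU cost when reading.
+   cachedRdd.persist(StorageLevel.MEMORY_ONLY_SER)
+   // Alternatively, trade memory for local-disk I/O entirely:
+   // cachedRdd.persist(StorageLevel.DISK_ONLY)
+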

diff --git a/app/views/help/spark/helpStageRuntime.scala.html b/app/views/help/spark/helpStageRuntime.scala.html new file mode 100644 index 000000000..05fe035a2 --- /dev/null +++ b/app/views/help/spark/helpStageRuntime.scala.html @@ -0,0 +1,34 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

+One Spark application can be broken into multiple jobs, and each job can be broken into multiple stages. +

+

Suggestions

+ +
1. High failure rate
+

+ A high failure rate can have multiple causes: using more than 2 cores per executor on YARN, an unstable
+ implementation, an unbalanced workload, or insufficient allocated memory, among others. Users are strongly
+ encouraged to look into the detailed error logs and find the exact cause. +

+ +
2. Slow stage runtime
+

+ Slow stage runtime is typically due to an unbalanced workload; repartition your RDDs into an appropriate number of
+ partitions (equal to or slightly less than k*[executor num], where k is an integer between 2 and 5).
+ However, if the slow runtime appears across all executors, this might suggest that the number of executors allocated
+ is not large enough. +

diff --git a/app/views/help/spark/helpStagesHeuristic.scala.html b/app/views/help/spark/helpStagesHeuristic.scala.html new file mode 100644 index 000000000..6af0162d1 --- /dev/null +++ b/app/views/help/spark/helpStagesHeuristic.scala.html @@ -0,0 +1,20 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +

This heuristic reports stage failures, high task failure rates for +each stage, and long average executor runtimes for each stage.

+ +

Stage/task failures can occur for a number of reasons, so it is +recommended to look at the YARN application error logs.

diff --git a/app/views/helpMapperInputSize.scala.html b/app/views/helpMapperInputSize.scala.html deleted file mode 100644 index dad7880fa..000000000 --- a/app/views/helpMapperInputSize.scala.html +++ /dev/null @@ -1,79 +0,0 @@ -

- This analysis shows how well the number of mappers is adjusted.
- This should allow you to better tweak the number of mappers for your job.
- There are two possible situations that needs some tuning. -

-

Too many small files

-

- This usually happens when the Hadoop job has: -

    -
  • A large number of mappers
  • -
  • Small file size (in the KBs)
  • -
-

-
Example
-

-

-

-

Suggestions

-

- Set the number of mappers smaller by specifying a number or combining small files using Pig or Hive.
-
- For Hadoop/Java jobs: Try to use mapred.max.split.size in the job conf to split the input files appropriately.
- For Apache-Pig jobs: Lower pig.maxCombinedSplitSize and set mapred.max.split.size to something smaller.
- For Apache-Hive jobs: Try to use mapred.max.split.size in the job conf to split the input files appropriately.
-

-

Large files/Unsplittable files

-

- This usually happens when the Hadoop job has: -

    -
  • A small number of mappers
  • -
  • Large file size (a few GB's)
  • -
-

-
Example
-

-

-

-

Suggestions

-

- Set the number of mappers larger by giving a specific number.
-
- For Hadoop/Java jobs: Use "jobConf.setNumMapTasks(NUMBER_OF_MAPPERS);"
- For Apache-Pig jobs: Ask Hadoop-Dev
- For Apache-Hive jobs: Ask Hadoop-Dev
-

\ No newline at end of file diff --git a/app/views/helpNoData.scala.html b/app/views/helpNoData.scala.html deleted file mode 100644 index 6dcdddb47..000000000 --- a/app/views/helpNoData.scala.html +++ /dev/null @@ -1,4 +0,0 @@ -

- This means that the JobTracker no longer provides information to Dr. Elephant for that job.
- The JobTracker wipes job data fairly quickly, and sometimes, will result in an empty set of data for certain jobs. -

\ No newline at end of file diff --git a/app/views/helpShuffleSort.scala.html b/app/views/helpShuffleSort.scala.html deleted file mode 100644 index 7db398b4e..000000000 --- a/app/views/helpShuffleSort.scala.html +++ /dev/null @@ -1,40 +0,0 @@ -

- This analysis shows how much time the reducer spends in shuffle and sort steps versus in the reducer code.
- This should allow you to understand the efficiency of your reducer. -

-

- This result of the analysis shows problems with reducers where shuffle and/or sort times are significantly larger than code runtime. -

-
Example
-

-

-

-

Suggestions

-

- If your shuffle time is high but sort is low, you likely need to turn slowstart on -

\ No newline at end of file diff --git a/app/views/index.scala.html b/app/views/index.scala.html index e50178e11..95c92df08 100644 --- a/app/views/index.scala.html +++ b/app/views/index.scala.html @@ -1,11 +1,42 @@ -@(numJobsAnalyzed: Int, numJobsSevere: Int, numJobsCritical: Int)(results: Html) - -@main("Dr. Elephant", "dashboard") { -
-

Dr. Elephant has been busy!

-

We looked through @numJobsAnalyzed jobs today.
- About @numJobsSevere of them could use some tuning.
- About @numJobsCritical of them need some serious attention!

-
- @results -} + + + + + + + + + DrElephant + + + + + + + + + + + + + + + + + + + diff --git a/app/views/main.scala.html b/app/views/main.scala.html index eca403f7f..5ce4c2a70 100644 --- a/app/views/main.scala.html +++ b/app/views/main.scala.html @@ -1,60 +1,89 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + @(title: String, page: String)(content: Html) +@import java.lang.Boolean; var analytics = false +@(analytics=play.Play.application().configuration().getBoolean("enable.analytics", false)) + +@* +* The base layout of Dr. Elephant +* +* This includes the nav bar at the top and other supporting stylesheets +*@ - - @title - - - - - - + + @title + + + - - - + + - - + + + - + - + + diff --git a/app/views/results/jobHistoryResults.scala.html b/app/views/results/jobHistoryResults.scala.html new file mode 100644 index 000000000..070cdd499 --- /dev/null +++ b/app/views/results/jobHistoryResults.scala.html @@ -0,0 +1,108 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(jobDefPair: IdUrlPair, results: java.util.Map[IdUrlPair, java.util.List[models.AppResult]], maxStages: Int, + flowExecTimeList:java.util.List[Long]) + +@import com.linkedin.drelephant.analysis.Severity + +@getSeverityColor(severity : Severity) = @{ + var color: String = "#5cb85c"; // LOW or NONE + + if(severity.getText.equalsIgnoreCase("CRITICAL")) { + color = "#d9534f" + } else if(severity.getText.equalsIgnoreCase("SEVERE")) { + color = "#e4804e" + } else if(severity.getText.equalsIgnoreCase("MODERATE")) { + color = "#f0ad4e" + } + + color +} + +@if(results != null && results.nonEmpty) { +
+

Job History

+
+ + + +
+ + +
+ Loading... + +
+ +
+ + +
+ + + + + @for(i <- 1 to maxStages) { + + } + + + + + @for((flowExecPair, jobs) <- results) { + + + + + + + @for(i <- 1 to maxStages) { + + } + + } + +
Job ExecutionsStage @i
+ Loading... + + @if(i <= jobs.length) { + @for((appHeuristicResult, jobCount) <- jobs(i-1).yarnAppHeuristicResults.zipWithIndex) { + ◉ + +
+

@appHeuristicResult.yarnAppResult.id

+

@appHeuristicResult.heuristicName

+ + + @for(appHeuristicResultDetail <- appHeuristicResult.yarnAppHeuristicResultDetails) { + + + + + } + +
@appHeuristicResultDetail.name@appHeuristicResultDetail.value
+
+
+ } + } +
+
+
+
+} \ No newline at end of file diff --git a/app/views/results/jobMetricsHistoryResults.scala.html b/app/views/results/jobMetricsHistoryResults.scala.html new file mode 100644 index 000000000..7ca3cff78 --- /dev/null +++ b/app/views/results/jobMetricsHistoryResults.scala.html @@ -0,0 +1,147 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ +@(jobDefPair: IdUrlPair, graphType: String, results: java.util.Map[IdUrlPair, java.util.List[models.AppResult]], maxStages: Int, + flowExecTimeList: java.util.List[Long]) + + @import com.linkedin.drelephant.analysis.Severity + @import com.linkedin.drelephant.util.Utils; + + @getSeverityColor(severity: Severity) = @{ + var color: String = "#5cb85c"; // LOW or NONE + + if(severity.getText.equalsIgnoreCase("CRITICAL")) { + color = "#d9534f" + } else if(severity.getText.equalsIgnoreCase("SEVERE")) { + color = "#e4804e" + } else if(severity.getText.equalsIgnoreCase("MODERATE")) { + color = "#f0ad4e" + } + + color + } + + @if(results != null && results.nonEmpty) { +
+

Job History

+
+ @if(graphType.equals("resources")) { + + + + } else { + + + + } + +
+ +
+ Loading... + +
+ +
+ + +
+ + + + + + + @for(i <- 1 to maxStages) { + + } + + + + + @for((flowExecPair, jobs) <- results) { + + + + + + + @for(i <- 1 to maxStages) { + + } + + } + +
Job ExecutionsStage @i + + + + +
+
+ + + + + + + + + +
used resources +
+ The resources used by the stage in GB Hours +
+
wasted resources +
+ The total resources wasted by the stage in GB Hours +
+
total runtime +
+ The total running time of the stage in HH:MM:SS +
+
total wait time +
+ The total wait time for the stage in HH:MM:SS +
+
+
+
+
+ Loading... + + @if(i <= jobs.length) { +
+ + + + + + + + + +
@(Utils.getResourceInGBHours(jobs(i - 1).resourceUsed).split("GB")(0)) + @(Utils.getResourceInGBHours(jobs(i - 1).resourceWasted).split("GB")(0)) + @(Utils.getDurationBreakdown(jobs(i - 1).finishTime - jobs(i - 1).startTime).split("Hours")(0)) + @(Utils.getDurationBreakdown(jobs(i - 1).totalDelay).split("Hours")(0)) +
+
+ } +
+
+
+
+ } \ No newline at end of file diff --git a/app/views/results/oldFlowHistoryResults.scala.html b/app/views/results/oldFlowHistoryResults.scala.html new file mode 100644 index 000000000..3bcfaae8b --- /dev/null +++ b/app/views/results/oldFlowHistoryResults.scala.html @@ -0,0 +1,108 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(flowDefPair: IdUrlPair, results: java.util.Map[IdUrlPair, java.util.Map[IdUrlPair, java.util.List[models.AppResult]]], + idPairToJobNameMap: java.util.Map[IdUrlPair, String], flowExecTimeList: java.util.List[Long]) + +@import com.linkedin.drelephant.analysis.Severity +@import scala.Predef; var jobDefIndex = 0 + +@getSeverityColor(severity : Severity) = @{ + var color: String = "#5cb85c"; // LOW or NONE + + if(severity.getText.equalsIgnoreCase("CRITICAL")) { + color = "#d9534f" + } else if(severity.getText.equalsIgnoreCase("SEVERE")) { + color = "#e4804e" + } else if(severity.getText.equalsIgnoreCase("MODERATE")) { + color = "#f0ad4e" + } + + color +} + +@if(results != null && results.nonEmpty) { + @tags.panel(){ Flow History Results: @flowDefPair.getId} { + + + + +
+ + +
+ + +
+ +
+ + +
+ + + + + @for((jobDefPair, jobName) <- idPairToJobNameMap) { + + } + + + + + @for((flowExecPair, jobMap) <- results) { + + + + + + + @for((jobDefPair, jobName) <- idPairToJobNameMap) { + + } + + + } + + +
Flow Executions + Job @{jobDefIndex = jobDefIndex + 1; jobDefIndex}
+ @if(jobName.length > 45) { @jobName.substring(0, 41)... } else { @jobName } +
+
+ Loading... + + @if(jobMap.get(jobDefPair) != null) { + @for((result,jobCount) <- jobMap.get(jobDefPair).zipWithIndex) { + ◉ + +
+

@result.id

+ @for(yarnAppHeuristicResult <- result.yarnAppHeuristicResults) { +

@yarnAppHeuristicResult.heuristicName

+ } +
+
+ } + } +
+
+
+ } +} \ No newline at end of file diff --git a/app/views/results/oldFlowMetricsHistoryResults.scala.html b/app/views/results/oldFlowMetricsHistoryResults.scala.html new file mode 100644 index 000000000..0eb35a8cd --- /dev/null +++ b/app/views/results/oldFlowMetricsHistoryResults.scala.html @@ -0,0 +1,159 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(flowDefPair: IdUrlPair, graphType: String, results: java.util.Map[IdUrlPair, java.util.Map[IdUrlPair, java.util.List[models.AppResult]]], + idPairToJobNameMap: java.util.Map[IdUrlPair, String], flowExecTimeList: java.util.List[Long]) + +@import com.linkedin.drelephant.util.Utils; +@import com.linkedin.drelephant.analysis.Severity +@import scala.Predef; var jobDefIndex = 0 + +@getSeverityColor(severity : Severity) = @{ + var color: String = "#5cb85c"; // LOW or NONE + + if(severity.getText.equalsIgnoreCase("CRITICAL")) { + color = "#d9534f" + } else if(severity.getText.equalsIgnoreCase("SEVERE")) { + color = "#e4804e" + } else if(severity.getText.equalsIgnoreCase("MODERATE")) { + color = "#f0ad4e" + } + + color +} + +@if(results != null && results.nonEmpty) { + @tags.panel(){ Flow History Results: @flowDefPair.getId} { + + @if(graphType.equals("resources")) { + + + + } else { + + + + } + + +
+ + +
+ + +
+ +
+ + +
+ + + + + @for((jobDefPair, jobName) <- idPairToJobNameMap) { + + } + + + + + @for((flowExecPair, jobMap) <- results) { + + + + + + + @for((jobDefPair, jobName) <- idPairToJobNameMap) { + + } + + + } + + +
Flow Executions + Job @{jobDefIndex = jobDefIndex + 1; jobDefIndex}
+ @if(jobName.length > 45) { @jobName.substring(0, 41)... } else { @jobName } +
+ + +
+ +
+ + + + + + + + + + + +
used resources +
+ The resources used by the stage in GB Hours +
+
wasted resources +
+ The total resources wasted by the stage in GB Hours +
+
total runtime +
+ The total running time of the stage in HH:MM:SS +
+
total wait time +
+ The total wait time for the stage in HH:MM:SS +
+
+
+
+
+ Loading... + + @if(jobMap.get(jobDefPair) != null) { + +
+ + + + + + + + + + + +
@Utils.getResourceInGBHours(Utils.getTotalResources(jobMap.get(jobDefPair)) ).split("GB")(0) + @Utils.getResourceInGBHours(Utils.getTotalWastedResources(jobMap.get(jobDefPair)) ).split("GB")(0) + @(Utils.getDurationBreakdown(Utils.getTotalRuntime(jobMap.get(jobDefPair))).split("Hours")(0)) + @(Utils.getDurationBreakdown(Utils.getTotalWaittime(jobMap.get(jobDefPair))).split("Hours")(0)) +
+
+ } +
+
+
+ } +} \ No newline at end of file diff --git a/app/views/results/oldJobHistoryResults.scala.html b/app/views/results/oldJobHistoryResults.scala.html new file mode 100644 index 000000000..d65d4c59a --- /dev/null +++ b/app/views/results/oldJobHistoryResults.scala.html @@ -0,0 +1,111 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(jobDefPair: IdUrlPair, results: java.util.Map[IdUrlPair, java.util.List[models.AppResult]], maxStages: Int, + flowExecTimeList:java.util.List[Long]) + +@import com.linkedin.drelephant.analysis.Severity + +@getSeverityColor(severity : Severity) = @{ + var color: String = "#5cb85c"; // LOW or NONE + + if(severity.getText.equalsIgnoreCase("CRITICAL")) { + color = "#d9534f" + } else if(severity.getText.equalsIgnoreCase("SEVERE")) { + color = "#e4804e" + } else if(severity.getText.equalsIgnoreCase("MODERATE")) { + color = "#f0ad4e" + } + + color +} + +@if(results != null && results.nonEmpty) { + @tags.panel(){ Job History Results: @jobDefPair.getId} { + + + +
+ + +
+ + +
+ +
+ + +
+ + + + + + + @for(i <- 1 to maxStages) { + + } + + + + + @for((flowExecPair, jobs) <- results) { + + + + + + + @for(i <- 1 to maxStages) { + + } + + + } + + +
Job ExecutionsStage @i
+ Loading... + + @if(i <= jobs.length) { + @for((appHeuristicResult, jobCount) <- jobs(i-1).yarnAppHeuristicResults.zipWithIndex) { + ◉ + +
+

@appHeuristicResult.yarnAppResult.id

+

@appHeuristicResult.heuristicName

+ + + @for(appHeuristicResultDetail <- appHeuristicResult.yarnAppHeuristicResultDetails) { + + + + + } + +
@appHeuristicResultDetail.name@appHeuristicResultDetail.value
+
+
+ } + } +
+
+
+ } +} \ No newline at end of file diff --git a/app/views/results/oldJobMetricsHistoryResults.scala.html b/app/views/results/oldJobMetricsHistoryResults.scala.html new file mode 100644 index 000000000..3ed968758 --- /dev/null +++ b/app/views/results/oldJobMetricsHistoryResults.scala.html @@ -0,0 +1,152 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(jobDefPair: IdUrlPair, graphType: String, results: java.util.Map[IdUrlPair, java.util.List[models.AppResult]], maxStages: Int, + flowExecTimeList:java.util.List[Long]) + +@import com.linkedin.drelephant.analysis.Severity +@import com.linkedin.drelephant.util.Utils; + +@getSeverityColor(severity : Severity) = @{ + var color: String = "#5cb85c"; // LOW or NONE + + if(severity.getText.equalsIgnoreCase("CRITICAL")) { + color = "#d9534f" + } else if(severity.getText.equalsIgnoreCase("SEVERE")) { + color = "#e4804e" + } else if(severity.getText.equalsIgnoreCase("MODERATE")) { + color = "#f0ad4e" + } + + color +} + +@if(results != null && results.nonEmpty) { + @tags.panel(){ Job History Results: @jobDefPair.getId} { + @if(graphType.equals("resources")) { + + + + } else { + + + + } + +
+ +
+ + +
+ +
+ + +
+ + + + + + + @for(i <- 1 to maxStages) { + + } + + + + + @for((flowExecPair, jobs) <- results) { + + + + + + + @for(i <- 1 to maxStages) { + + } + + + } + + +
Job ExecutionsStage @i + + +
+ +
+ + + + + + + + + + + +
used resources +
+ The resources used by the stage in GB Hours +
+
wasted resources +
+ The total resources wasted by the stage in GB Hours +
+
total runtime +
+ The total running time of the stage in HH:MM:SS +
+
total wait time +
+ The total wait time for the stage in HH:MM:SS +
+
+
+
+
+ Loading... + + @if(i <= jobs.length) { +
+ + + + + + + + + + + +
@(Utils.getResourceInGBHours(jobs(i-1).resourceUsed).split("GB")(0)) + @(Utils.getResourceInGBHours(jobs(i-1).resourceWasted).split("GB")(0)) + @(Utils.getDurationBreakdown(jobs(i-1).finishTime-jobs(i-1).startTime).split("Hours")(0)) + @(Utils.getDurationBreakdown(jobs(i-1).totalDelay).split("Hours")(0)) +
+
+ } +
+
+
+ } +} \ No newline at end of file diff --git a/app/views/results/searchResults.scala.html b/app/views/results/searchResults.scala.html new file mode 100644 index 000000000..3985e145a --- /dev/null +++ b/app/views/results/searchResults.scala.html @@ -0,0 +1,40 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(title: String, results: java.util.List[models.AppResult]) + +@* +* The layout of the job results. +* +* @param title The title of the search results +* @param results The search results to be displayed +*@ + +
+
+

@title

+
+ @if(results != null && results.nonEmpty) { +
+ @for(result <- results) { + + @tags.jobSummary(result) + + } +
+ } +
diff --git a/app/views/search.scala.html b/app/views/search.scala.html deleted file mode 100644 index 1738d8273..000000000 --- a/app/views/search.scala.html +++ /dev/null @@ -1,59 +0,0 @@ -@()(results: Html) - -@main("Dr. Elephant - Search", "search") { - @tags.column(3) { - @tags.panel(){ Search } { -
-
- - -
-
- - -
-
- -
-
- -
-
- -
-
- -
-
- -
-
- -
-
- -
-
- -
- -
- } - } - @tags.column(9) { - @results - } -} diff --git a/app/views/singlejob.scala.html b/app/views/singlejob.scala.html deleted file mode 100644 index ff53814e5..000000000 --- a/app/views/singlejob.scala.html +++ /dev/null @@ -1,94 +0,0 @@ -@(result: model.JobResult) - -
- @if(result != null) { -
-

-

[@result.username] [@result.jobType.getText()] @result.job_id

-
@result.jobName
-
- - - - - - - @if(result.jobExecUrl != null){ - - - - - } - @if(result.jobUrl != null){ - - - - - } - @if(result.flowExecUrl != null){ - - - - - } - @if(result.flowUrl != null){ - - - - - } - -
Jobtracker:@result.url
Job execution:@result.jobExecUrl
Job definition:@result.jobUrl
Flow execution:@result.flowExecUrl
Flow definition:@result.flowUrl
- @if(result.flowExecUrl != null){ - - } - @if(result.jobUrl != null){ - - } -
- -
 
-
- @for(heuristicResult <- result.heuristicResults) { - @heuristicResult.analysisName - } -
-

-
- - } else { -
-

Results

-
- - } -
diff --git a/app/views/tags/aggregatedMetrics.scala.html b/app/views/tags/aggregatedMetrics.scala.html new file mode 100644 index 000000000..915f43299 --- /dev/null +++ b/app/views/tags/aggregatedMetrics.scala.html @@ -0,0 +1,42 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(result: models.AppResult) +@import com.linkedin.drelephant.util.Utils; + +@if(result.resourceUsed != 0) { +

+ + + + + + @if(!result.jobType.equals("Spark")) { + + } + +
used resources @Utils.getResourceInGBHours(result.resourceUsed)
+ The resources used by the job in GB Hours +
wasted resources @(Utils.getPercentage(result.resourceWasted, result.resourceUsed))
+ The percent of resources wasted +
total runtime @(Utils.getDurationBreakdown(result.finishTime - result.startTime))
+ Total runtime of the job in HH:MM:SS +
total wait time @(Utils.getPercentage(result.totalDelay, result.finishTime - result.startTime))
+ The percent of time job waited +
+

+} + diff --git a/app/views/tags/column.scala.html b/app/views/tags/column.scala.html index 3d22b7c66..1d6b70a44 100644 --- a/app/views/tags/column.scala.html +++ b/app/views/tags/column.scala.html @@ -1,3 +1,18 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ @(width: Int)(content: Html)
@content diff --git a/app/views/tags/jobHeader.scala.html b/app/views/tags/jobHeader.scala.html new file mode 100644 index 000000000..eede38f27 --- /dev/null +++ b/app/views/tags/jobHeader.scala.html @@ -0,0 +1,38 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(result: models.AppResult) + +@* +* The job header includes the following info, +* The user who ran the job +* The job type +* The job id +* And the analysis time +* +* @param result The job of type AppResult +*@ + +

+

[@result.username] [@result.jobType] @result.id

+
+
@result.name
+

+ + diff --git a/app/views/tags/jobSummary.scala.html b/app/views/tags/jobSummary.scala.html new file mode 100644 index 000000000..98a01df50 --- /dev/null +++ b/app/views/tags/jobSummary.scala.html @@ -0,0 +1,32 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(result: models.AppResult) + +@* +* Displays a brief info of the job. +* The job block includes, the job header and the heuristic info. +* +* @param result The job of type AppResult +*@ + +@tags.jobHeader(result) +

+ @for(appHeuristicResult <- result.yarnAppHeuristicResults) { + @appHeuristicResult.heuristicName + } +

+@tags.aggregatedMetrics(result) \ No newline at end of file diff --git a/app/views/tags/pagination.scala.html b/app/views/tags/pagination.scala.html new file mode 100644 index 000000000..ce725b921 --- /dev/null +++ b/app/views/tags/pagination.scala.html @@ -0,0 +1,58 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ + +@(paginationStats: controllers.PaginationStats, route:Call) + +@* +* The layout of the pagination bar. +* +* @param paginationStats The pagination information +* @param route the search call to be made +*@ + + diff --git a/app/views/tags/panel.scala.html b/app/views/tags/panel.scala.html index d5fbab5ed..c9773066a 100644 --- a/app/views/tags/panel.scala.html +++ b/app/views/tags/panel.scala.html @@ -1,3 +1,18 @@ +@* +* Copyright 2016 LinkedIn Corp. +* +* Licensed under the Apache License, Version 2.0 (the "License"); you may not +* use this file except in compliance with the License. You may obtain a copy of +* the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*@ @()(title: Html)(body: Html)
diff --git a/app/views/tags/searchPanel.scala.html b/app/views/tags/searchPanel.scala.html new file mode 100644 index 000000000..e69aace1f --- /dev/null +++ b/app/views/tags/searchPanel.scala.html @@ -0,0 +1,40 @@ + + + +
+
+ + +
+ +
+ +
+ + + +
\ No newline at end of file diff --git a/build.sbt b/build.sbt index 1bf6c1ebe..071e15337 100644 --- a/build.sbt +++ b/build.sbt @@ -1,41 +1,38 @@ +// +// Copyright 2016 LinkedIn Corp. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. +// + import play.Project._ +import Dependencies._ name := "dr-elephant" -version := "0.4" +version := "2.0.6" + +organization := "com.linkedin.drelephant" javacOptions in Compile ++= Seq("-source", "1.6", "-target", "1.6") -libraryDependencies ++= Seq( - javaJdbc, - javaEbean, - cache, - "commons-io" % "commons-io" % "2.4", - "mysql" % "mysql-connector-java" % "5.1.22", - "org.apache.commons" % "commons-email" % "1.3.2", - "org.apache.hadoop" % "hadoop-auth" % "2.3.0", - "org.codehaus.jackson" % "jackson-mapper-asl" % "1.7.3", - "org.jsoup" % "jsoup" % "1.7.3" -) - -libraryDependencies ++= ( -if(sys.props.get("hadoop.version").exists(_ == "1")) Seq( - "com.linkedin.li-hadoop" % "hadoop-core" % "1.2.1.45" -) -else if(sys.props.get("hadoop.version").exists(_ == "2")) Seq( - "com.linkedin.li-hadoop" % "hadoop-common" % "2.3.0.27", - "com.linkedin.li-hadoop" % "hadoop-mapreduce-client-core" % "2.3.0.27" -) -else Seq() -) - -val LinkedInPatterns = Patterns( - Seq("[organization]/[module]/[revision]/[module]-[revision].ivy"), - Seq("[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]"), - isMavenCompatible = true) - -val ArtifactoryBaseUrl = "http://artifactory.corp.linkedin.com:8081/artifactory/" - -resolvers += Resolver.url("LI repo repository", url(ArtifactoryBaseUrl + "repo"))(LinkedInPatterns) +libraryDependencies ++= dependencies map { _.excludeAll(exclusionRules: _*) } + +// Create a new custom configuration called compileonly +ivyConfigurations += config("compileonly").hide + +// Append all dependencies with 'compileonly' configuration to unmanagedClasspath in Compile. +unmanagedClasspath in Compile ++= update.value.select(configurationFilter("compileonly")) playJavaSettings + +scalaVersion := "2.10.4" diff --git a/compile.conf b/compile.conf new file mode 100644 index 000000000..b42a248d5 --- /dev/null +++ b/compile.conf @@ -0,0 +1,3 @@ +hadoop_version=2.3.0 +spark_version=1.4.0 +play_opts="-Dsbt.repository.config=app-conf/resolver.conf" diff --git a/compile.sh b/compile.sh index cd99e3c4b..4213fea9f 100755 --- a/compile.sh +++ b/compile.sh @@ -1,36 +1,151 @@ #!/usr/bin/env bash -rm -rf dist +# +# Copyright 2016 LinkedIn Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +function print_usage(){ + echo "usage: ./compile.sh PATH_TO_CONFIG_FILE(optional)" +} + +function play_command() { + if type activator 2>/dev/null; then + activator "$@" + else + play "$@" + fi +} + +# Default configurations +HADOOP_VERSION="2.3.0" +SPARK_VERSION="1.4.0" + +# User should pass an optional argument which is a path to config file +if [ -z "$1" ]; +then + echo "Using the default configuration" +else + CONF_FILE_PATH=$1 + echo "Using config file: "$CONF_FILE_PATH + + # User must give a valid file as argument + if [ -f $CONF_FILE_PATH ]; + then + echo "Reading from config file..." + else + echo "error: Couldn't find a valid config file at: " $CONF_FILE_PATH + print_usage + exit 1 + fi + + source $CONF_FILE_PATH + + # Fetch the Hadoop version + if [ -n "${hadoop_version}" ]; then + HADOOP_VERSION=${hadoop_version} + fi + + # Fetch the Spark version + if [ -n "${spark_version}" ]; then + SPARK_VERSION=${spark_version} + fi + + # Fetch other play opts + if [ -n "${play_opts}" ]; then + PLAY_OPTS=${play_opts} + fi +fi + +echo "Hadoop Version : $HADOOP_VERSION" +echo "Spark Version : $SPARK_VERSION" +echo "Other opts set : $PLAY_OPTS" + +OPTS+=" -Dhadoopversion=$HADOOP_VERSION" +OPTS+=" -Dsparkversion=$SPARK_VERSION" +OPTS+=" $PLAY_OPTS" + + +project_root=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +cd ${project_root} + +cd ${project_root} + + +#if npm is installed, install bower,ember-cli and other components for new UI + +if hash npm 2>/dev/null; then + echo "############################################################################" + echo "npm installation found, we'll compile with the new user interface" + echo "############################################################################" + set -x + sleep 3 + ember_assets=${project_root}/public/assets + ember_resources_dir=${ember_assets}/ember + ember_web_directory=${project_root}/web + + # cd to the ember directory + cd ${ember_web_directory} + + npm install + node_modules/bower/bin/bower install + node_modules/ember-cli/bin/ember build --prod + rm -r ${ember_resources_dir} 2> /dev/null + mkdir ${ember_resources_dir} + cp dist/assets/dr-elephant.css ${ember_resources_dir}/ + cp dist/assets/dr-elephant.js ${ember_resources_dir}/ + cp dist/assets/vendor.js ${ember_resources_dir}/ + cp dist/assets/vendor.css ${ember_resources_dir}/ + cp -r dist/fonts ${ember_assets}/ + cd ${project_root} +else + echo "############################################################################" + echo "npm installation not found. Please install npm in order to compile with new user interface" + echo "############################################################################" + sleep 3 +fi + +trap "exit" SIGINT SIGTERM + +start_script=${project_root}/scripts/start.sh +stop_script=${project_root}/scripts/stop.sh +app_conf=${project_root}/app-conf + +# Echo the value of pwd in the script so that it is clear what is being removed. 
+rm -rf ${project_root}/dist mkdir dist -play -Dhadoop.version=1 clean compile test dist +play_command $OPTS clean test compile dist cd target/universal -unzip *.zip +ZIP_NAME=`/bin/ls *.zip` +unzip ${ZIP_NAME} +rm ${ZIP_NAME} +DIST_NAME=${ZIP_NAME%.zip} -DIST_NAME=$(find * -maxdepth 0 -type d -not -name "tmp") +chmod +x ${DIST_NAME}/bin/dr-elephant -sed -i.bak $'/declare -r app_classpath/s/.$/:$HADOOP_HOME\/*:$HADOOP_HOME\/lib\/*:$HADOOP_HOME\/conf"\\\naddJava "-Djava.library.path=$HADOOP_HOME\/lib\/native\/Linux-amd64-64"\\\n/' $DIST_NAME/bin/dr-elephant +# Append hadoop classpath and the ELEPHANT_CONF_DIR to the Classpath +sed -i.bak $'/declare -r app_classpath/s/.$/:`hadoop classpath`:${ELEPHANT_CONF_DIR}"/' ${DIST_NAME}/bin/dr-elephant -chmod +x $DIST_NAME/bin/dr-elephant +cp $start_script ${DIST_NAME}/bin/ -zip -r $DIST_NAME-h1.zip $DIST_NAME -x *.zip tmp/\* * +cp $stop_script ${DIST_NAME}/bin/ -mv $DIST_NAME-h1.zip ../../dist/ +cp -r $app_conf ${DIST_NAME} -cd ../../ +zip -r ${DIST_NAME}.zip ${DIST_NAME} -play -Dhadoop.version=2 clean compile test dist - -cd target/universal - -unzip *.zip - -sed -i.bak $'/declare -r app_classpath/s/.$/:$HADOOP_HOME\/share\/hadoop\/common\/*:$HADOOP_HOME\/share\/hadoop\/common\/lib\/*:$HADOOP_HOME\/share\/hadoop\/hdfs\/*:$HADOOP_CONF_DIR"\\\naddJava "-Djava.library.path=$HADOOP_HOME\/lib\/native"\\\n/' $DIST_NAME/bin/dr-elephant - -chmod +x $DIST_NAME/bin/dr-elephant - -zip -r $DIST_NAME-h2.zip $DIST_NAME -x *.zip tmp/\* * - -mv $DIST_NAME-h2.zip ../../dist/ +mv ${DIST_NAME}.zip ${project_root}/dist/ diff --git a/conf/application.conf b/conf/application.conf index db166e9d0..3d173b94c 100644 --- a/conf/application.conf +++ b/conf/application.conf @@ -1,3 +1,19 @@ +# +# Copyright 2016 LinkedIn Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + # This is the main configuration file for the application. # ~~~~~ @@ -5,7 +21,8 @@ # ~~~~~ # The secret key is used to secure cryptographics functions. # If you deploy your application to several instances be sure to use the same key! -application.secret="c85x?bXBZk]1mjCObKaUgnoO^fwFbB3uR0qwgsb8RBs2]]Ma9VucOE0/]joh9Nlj" +application.secret="changeme" +application.secret=${?APPLICATION_SECRET} # The application languages # ~~~~~ @@ -32,11 +49,7 @@ application.global=Global # ~~~~~ # You can declare as many datasources as you want. # By convention, the default datasource is named `default` -# db.default.driver=com.mysql.jdbc.Driver -db.default.url="jdbc:mysql://localhost/drelephant?characterEncoding=UTF-8" -db.default.user=root -db.default.password="" # # @@ -46,20 +59,15 @@ db.default.password="" # Evolutions # ~~~~~ # You can disable evolutions if needed -evolutionplugin=disabled -# applyEvolutions.default=true +# evolutionplugin=disabled +applyEvolutions.default=true # Ebean configuration # ~~~~~ # You can declare as many Ebean servers as you want. 
# By convention, the default server is named `default` # -ebean.default="model.*" - -# Logger -# ~~~~~ -# You can also configure logback (http://logback.qos.ch/), -# by providing an application-logger.xml file in the conf directory. +ebean.default= "models.*" # Root logger: logger.root=ERROR @@ -69,14 +77,3 @@ logger.play=INFO # Logger provided to your application: logger.application=DEBUG - -# Emailer -smtp.host=email.corp.linkedin.com -smtp.port=25 -smtp.from="azkaban-noreply@linkedin.com" -# smtp.user=azkaban-noreply -# smtp.password= - -#Kerberos -keytab.location="/export/apps/hadoop/keytabs/dr_elephant-service.keytab" -keytab.user="elephant/eat1-magicaz01.grid.linkedin.com" \ No newline at end of file diff --git a/conf/evolutions/default/1.sql b/conf/evolutions/default/1.sql index b6f17abc6..14b82a6e7 100644 --- a/conf/evolutions/default/1.sql +++ b/conf/evolutions/default/1.sql @@ -1,48 +1,92 @@ +# +# Copyright 2016 LinkedIn Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + # --- !Ups -create table job_heuristic_result ( - id integer auto_increment not null, - job_job_id varchar(50), - severity integer, - analysis_name varchar(255), - data longtext, - data_columns integer, - constraint ck_job_heuristic_result_severity check (severity in ('2','4','1','3','0')), - constraint pk_job_heuristic_result primary key (id)) -; - -create table job_result ( - job_id varchar(50) not null, - username varchar(50), - job_name varchar(100), - start_time bigint, - analysis_time bigint, - severity integer, - job_type varchar(6), - url varchar(200), - cluster varchar(100), - meta_urls longtext, - constraint ck_job_result_severity check (severity in ('2','4','1','3','0')), - constraint ck_job_result_job_type check (job_type in ('Pig','Hive','Hadoop')), - constraint pk_job_result primary key (job_id)) -; - -alter table job_heuristic_result add constraint fk_job_heuristic_result_job_1 foreign key (job_job_id) references job_result (job_id) on delete restrict on update restrict; -create index ix_job_heuristic_result_job_1 on job_heuristic_result (job_job_id); -create index ix_job_result_username_1 on job_result (username); -create index ix_job_result_analysis_time_1 on job_result (analysis_time); -create index ix_job_result_severity_1 on job_result (severity); -create index ix_job_result_cluster_1 on job_result (cluster); +CREATE TABLE yarn_app_result ( + id VARCHAR(50) NOT NULL COMMENT 'The application id, e.g., application_1236543456321_1234567', + name VARCHAR(100) NOT NULL COMMENT 'The application name', + username VARCHAR(50) NOT NULL COMMENT 'The user who started the application', + queue_name VARCHAR(50) DEFAULT NULL COMMENT 'The queue the application was submitted to', + start_time BIGINT UNSIGNED NOT NULL COMMENT 'The time in which application started', + finish_time BIGINT UNSIGNED NOT NULL COMMENT 'The time in which application finished', + tracking_url VARCHAR(255) NOT NULL COMMENT 'The web URL that can be used to track the application', + job_type VARCHAR(20) NOT NULL COMMENT 'The Job Type 
e.g, Pig, Hive, Spark, HadoopJava', + severity TINYINT(2) UNSIGNED NOT NULL COMMENT 'Aggregate severity of all the heuristics. Ranges from 0(LOW) to 4(CRITICAL)', + score MEDIUMINT(9) UNSIGNED DEFAULT 0 COMMENT 'The application score which is the sum of heuristic scores', + workflow_depth TINYINT(2) UNSIGNED DEFAULT 0 COMMENT 'The application depth in the scheduled flow. Depth starts from 0', + scheduler VARCHAR(20) DEFAULT NULL COMMENT 'The scheduler which triggered the application', + job_name VARCHAR(255) NOT NULL DEFAULT '' COMMENT 'The name of the job in the flow to which this app belongs', + job_exec_id VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A unique reference to a specific execution of the job/action(job in the workflow). This should filter all applications (mapreduce/spark) triggered by the job for a particular execution.', + flow_exec_id VARCHAR(255) NOT NULL DEFAULT '' COMMENT 'A unique reference to a specific flow execution. This should filter all applications fired by a particular flow execution. Note that if the scheduler supports sub-workflows, then this ID should be the super parent flow execution id that triggered the the applications and sub-workflows.', + job_def_id VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A unique reference to the job in the entire flow independent of the execution. This should filter all the applications(mapreduce/spark) triggered by the job for all the historic executions of that job.', + flow_def_id VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A unique reference to the entire flow independent of any execution. This should filter all the historic mr jobs belonging to the flow. Note that if your scheduler supports sub-workflows, then this ID should reference the super parent flow that triggered the all the jobs and sub-workflows.', + job_exec_url VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A url to the job execution on the scheduler', + flow_exec_url VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A url to the flow execution on the scheduler', + job_def_url VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A url to the job definition on the scheduler', + flow_def_url VARCHAR(800) NOT NULL DEFAULT '' COMMENT 'A url to the flow definition on the scheduler', + + PRIMARY KEY (id) +); + +create index yarn_app_result_i1 on yarn_app_result (finish_time); +create index yarn_app_result_i2 on yarn_app_result (username,finish_time); +create index yarn_app_result_i3 on yarn_app_result (job_type,username,finish_time); +create index yarn_app_result_i4 on yarn_app_result (flow_exec_id); +create index yarn_app_result_i5 on yarn_app_result (job_def_id); +create index yarn_app_result_i6 on yarn_app_result (flow_def_id); +create index yarn_app_result_i7 on yarn_app_result (start_time); + +CREATE TABLE yarn_app_heuristic_result ( + id INT(11) NOT NULL AUTO_INCREMENT COMMENT 'The application heuristic result id', + yarn_app_result_id VARCHAR(50) NOT NULL COMMENT 'The application id', + heuristic_class VARCHAR(255) NOT NULL COMMENT 'Name of the JVM class that implements this heuristic', + heuristic_name VARCHAR(128) NOT NULL COMMENT 'The heuristic name', + severity TINYINT(2) UNSIGNED NOT NULL COMMENT 'The heuristic severity ranging from 0(LOW) to 4(CRITICAL)', + score MEDIUMINT(9) UNSIGNED DEFAULT 0 COMMENT 'The heuristic score for the application. 
score = severity * number_of_tasks(map/reduce) where severity not in [0,1], otherwise score = 0', + + PRIMARY KEY (id), + CONSTRAINT yarn_app_heuristic_result_f1 FOREIGN KEY (yarn_app_result_id) REFERENCES yarn_app_result (id) +); +create index yarn_app_heuristic_result_i1 on yarn_app_heuristic_result (yarn_app_result_id); +create index yarn_app_heuristic_result_i2 on yarn_app_heuristic_result (heuristic_name,severity); + +CREATE TABLE yarn_app_heuristic_result_details ( + yarn_app_heuristic_result_id INT(11) NOT NULL COMMENT 'The application heuristic result id', + name VARCHAR(128) NOT NULL DEFAULT '' COMMENT 'The analysis detail entry name/key', + value VARCHAR(255) NOT NULL DEFAULT '' COMMENT 'The analysis detail value corresponding to the name', + details TEXT COMMENT 'More information on analysis details. e.g, stacktrace', + + PRIMARY KEY (yarn_app_heuristic_result_id,name), + CONSTRAINT yarn_app_heuristic_result_details_f1 FOREIGN KEY (yarn_app_heuristic_result_id) REFERENCES yarn_app_heuristic_result (id) +); + +create index yarn_app_heuristic_result_details_i1 on yarn_app_heuristic_result_details (name); # --- !Downs SET FOREIGN_KEY_CHECKS=0; -drop table job_heuristic_result; +DROP TABLE yarn_app_result; + +DROP TABLE yarn_app_heuristic_result; -drop table job_result; +DROP TABLE yarn_app_heuristic_result_details; SET FOREIGN_KEY_CHECKS=1; diff --git a/conf/evolutions/default/2.sql b/conf/evolutions/default/2.sql index 7a56dc4f0..9f9e39e51 100644 --- a/conf/evolutions/default/2.sql +++ b/conf/evolutions/default/2.sql @@ -1,19 +1,9 @@ -# Add urls - +# --- Indexing on queue for seach by queue feature # --- !Ups -alter table job_result - add column job_exec_url varchar(200), - add column job_url varchar(200), - add column flow_exec_url varchar(200), - add column flow_url varchar(200), - drop column meta_urls; +create index yarn_app_result_i8 on yarn_app_result (queue_name); # --- !Downs -alter table job_result - drop column job_exec_url - drop column job_url - drop column flow_exec_utl - drop column flow_url - add column meta_urls longtext; +drop index yarn_app_result_i8 on yarn_app_result; + diff --git a/conf/evolutions/default/3.sql b/conf/evolutions/default/3.sql index 87bd292b3..a8cd34093 100644 --- a/conf/evolutions/default/3.sql +++ b/conf/evolutions/default/3.sql @@ -1,17 +1,16 @@ -# change col size - +# --- Indexing on queue for seach by queue feature # --- !Ups -alter table job_result - modify column job_exec_url varchar(2048), - modify column job_url varchar(2048), - modify column flow_exec_url varchar(2048), - modify column flow_url varchar(2048); +alter table yarn_app_result add column resource_used BIGINT UNSIGNED DEFAULT 0 COMMENT 'The resources used by the job in MB Seconds'; +alter table yarn_app_result add column resource_wasted BIGINT UNSIGNED DEFAULT 0 COMMENT 'The resources wasted by the job in MB Seconds'; +alter table yarn_app_result add column total_delay BIGINT UNSIGNED DEFAULT 0 COMMENT 'The total delay in starting of mappers and reducers'; # --- !Downs -alter table job_result - modify column job_exec_url varchar(200), - modify column job_url varchar(200), - modify column flow_exec_url varchar(200), - modify column flow_url varchar(200); +alter table yarn_app_result drop resource_used; +alter table yarn_app_result drop resource_wasted; +alter table yarn_app_result drop total_delay; + + + + diff --git a/conf/log4j.properties b/conf/log4j.properties new file mode 100644 index 000000000..7810bea8d --- /dev/null +++ b/conf/log4j.properties @@ -0,0 +1,27 @@ +# +# 
+# Copyright 2016 LinkedIn Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+# Define the root logger with a file appender
+log4j.rootLogger = INFO, FA
+
+# File appender
+log4j.appender.FA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.FA.File=../logs/elephant/dr_elephant.log
+log4j.appender.FA.layout=org.apache.log4j.PatternLayout
+log4j.appender.FA.layout.ConversionPattern=%d{MM-dd-yyyy HH:mm:ss} %-5p [%t] %c %x: %m%n
+# An entry produced by this pattern looks like (illustrative):
+#   06-15-2016 13:45:10 INFO  [main] com.linkedin.drelephant.DrElephant : message
+log4j.appender.FA.DatePattern='.'yyyy-MM-dd
+log4j.appender.FA.ImmediateFlush=true
+log4j.appender.FA.Append=true
diff --git a/conf/routes b/conf/routes
index e7e129585..8fc6b09ab 100644
--- a/conf/routes
+++ b/conf/routes
@@ -1,19 +1,67 @@
+#
+# Copyright 2016 LinkedIn Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
# Routes
# This file defines all application routes (Higher priority routes first)
# ~~~~

-# Home page
-GET     /                              controllers.Application.dashboard(page: Int = 0)
-GET     /dashboard/:page               controllers.Application.dashboard(page: Int)
-GET     /help                          controllers.Application.help()
-GET     /email                         controllers.Application.testEmail()
-GET     /search                        controllers.Application.search()
-GET     /flowrelated                   controllers.Application.flowRelated()
-GET     /alljobexecs                   controllers.Application.allJobExecs()
-GET     /rest/job                      controllers.Application.restJobResult(id: String)
-GET     /rest/jobexec                  controllers.Application.restJobExecResult(url: String)
-GET     /rest/flowexec                 controllers.Application.restFlowExecResult(url: String)
+# Application calls
+GET     /                              controllers.Application.dashboard()
+GET     /help                          controllers.Application.oldHelp()
+GET     /search                        controllers.Application.search()
+GET     /compare                       controllers.Application.compare()
+GET     /flowhistory                   controllers.Application.oldFlowHistory()
+GET     /jobhistory                    controllers.Application.oldJobHistory()
+
+# Routes for new user interface
+
+GET     /newflowhistory                controllers.Application.flowHistory()
+GET     /newjobhistory                 controllers.Application.jobHistory()
+GET     /newhelp                       controllers.Application.help()
+
+# Rest calls
+GET     /rest/job                      controllers.Application.restAppResult(id: String)
+GET     /rest/jobexec                  controllers.Application.restJobExecResult(id: String)
+GET     /rest/flowexec                 controllers.Application.restFlowExecResult(id: String)
+GET     /rest/search                   controllers.Application.restSearch()
+GET     /rest/compare                  controllers.Application.restCompare()
+GET     /rest/flowgraphdata            controllers.Application.restFlowGraphData(id: String)
+GET     /rest/jobgraphdata             controllers.Application.restJobGraphData(id: String)
+GET     /rest/jobmetricsgraphdata      controllers.Application.restJobMetricsGraphData(id: String)
+GET     /rest/flowmetricsgraphdata     controllers.Application.restFlowMetricsGraphData(id: String)
+GET     /rest/dashboard-summaries      controllers.api.v1.Web.restDashboardSummaries()
+GET     /rest/workflow-summaries       controllers.api.v1.Web.restWorkflowSummariesForUser(username: String)
+GET     /rest/job-summaries            controllers.api.v1.Web.restJobSummariesForUser(username: String)
+GET     /rest/application-summaries    controllers.api.v1.Web.restApplicationSummariesForUser(username: String)
+GET     /rest/workflows                controllers.api.v1.Web.restWorkflowFromFlowId(workflowid: String)
+GET     /rest/jobs                     controllers.api.v1.Web.restJobFromJobId(jobid: String)
+GET     /rest/applications             controllers.api.v1.Web.restApplicationFromApplicationId(applicationid: String)
+GET     /rest/search-options           controllers.api.v1.Web.restSearchOptions()
+GET     /rest/search-results           controllers.api.v1.Web.search()
+GET     /rest/resourceusagedatabyuser  controllers.Application.restResourceUsageDataByUser(startTime: String, endTime: String)
+GET     /rest/user-details             controllers.api.v1.Web.restGetUsersSummaryStats()
+GET     /rest/workflow-exceptions      controllers.api.v1.Web.restExceptions()
+GET     /rest/exception-statuses       controllers.api.v1.Web.restExceptionStatuses()

# Metrics calls
+GET     /ping                          controllers.MetricsController.ping()
+GET     /metrics                       controllers.MetricsController.index()
+GET     /healthcheck                   controllers.MetricsController.healthcheck()

# Map static resources from the /public folder to the /assets URL path
-GET     /assets/*file                  controllers.Assets.at(path="/public", file)
+GET     /assets/*file                  controllers.Assets.at(path="/public", file)
+GET     /new                           controllers.Application.serveAsset(path="index.html")
+GET     /new/*path                     controllers.Application.serveAsset(path)
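The REST routes above are all plain GETs, so they can be exercised directly once the server is running. A sketch (host, port and the application id are illustrative; the port depends on how Dr. Elephant is deployed):

curl 'http://localhost:8080/ping'
curl 'http://localhost:8080/rest/job?id=application_1463184496000_0001'

diff --git a/images/wiki/compare-page.png b/images/wiki/compare-page.png
new file mode 100644
index 000000000..16e3965a2
Binary files /dev/null and b/images/wiki/compare-page.png differ
diff --git a/images/wiki/critical.png b/images/wiki/critical.png
new file mode 100644
index 000000000..26e83f623
Binary files /dev/null and b/images/wiki/critical.png differ
diff --git a/images/wiki/dashboard.png b/images/wiki/dashboard.png
new file mode 100755
index 000000000..4ae30ab26
Binary files /dev/null and b/images/wiki/dashboard.png differ
diff --git a/images/wiki/dr-elephant-logo-150x150.png b/images/wiki/dr-elephant-logo-150x150.png
new file mode 100644
index 000000000..a7f7b9670
Binary files /dev/null and b/images/wiki/dr-elephant-logo-150x150.png differ
diff --git a/images/wiki/dr-elephant-logo-300x300.png b/images/wiki/dr-elephant-logo-300x300.png
new file mode 100644
index 000000000..1b1f9cc83
Binary files /dev/null and b/images/wiki/dr-elephant-logo-300x300.png differ
diff --git a/images/wiki/flow-history.png b/images/wiki/flow-history.png
new file mode 100644
index 000000000..f6aab6a3a
Binary files /dev/null and b/images/wiki/flow-history.png differ
diff --git a/images/wiki/flowhistorymetrics.png b/images/wiki/flowhistorymetrics.png
new file mode 100644
index 000000000..2c60aa9fe
Binary files /dev/null and b/images/wiki/flowhistorymetrics.png differ
diff --git a/images/wiki/job-history.png b/images/wiki/job-history.png
new file mode 100644
index 000000000..5862ece5b
Binary files /dev/null and b/images/wiki/job-history.png differ
diff --git a/images/wiki/jobdetails.png b/images/wiki/jobdetails.png
new file mode 100755
index 000000000..1c293b7a1
Binary files /dev/null and b/images/wiki/jobdetails.png differ
diff --git a/images/wiki/jobhistorymetrics.png b/images/wiki/jobhistorymetrics.png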
new file mode 100644
index 000000000..9582763c0
Binary files /dev/null and b/images/wiki/jobhistorymetrics.png differ
diff --git a/images/wiki/low.png b/images/wiki/low.png
new file mode 100644
index 000000000..7f23d04a6
Binary files /dev/null and b/images/wiki/low.png differ
diff --git a/images/wiki/moderate.png b/images/wiki/moderate.png
new file mode 100644
index 000000000..c5c129c29
Binary files /dev/null and b/images/wiki/moderate.png differ
diff --git a/images/wiki/none.png b/images/wiki/none.png
new file mode 100644
index 000000000..7f23d04a6
Binary files /dev/null and b/images/wiki/none.png differ
diff --git a/images/wiki/search.png b/images/wiki/search.png
new file mode 100755
index 000000000..beb06801a
Binary files /dev/null and b/images/wiki/search.png differ
diff --git a/images/wiki/severe.png b/images/wiki/severe.png
new file mode 100644
index 000000000..6be36c201
Binary files /dev/null and b/images/wiki/severe.png differ
diff --git a/images/wiki/suggestions.png b/images/wiki/suggestions.png
new file mode 100755
index 000000000..91c1a603e
Binary files /dev/null and b/images/wiki/suggestions.png differ
diff --git a/jacoco.sbt b/jacoco.sbt
new file mode 100644
index 000000000..872467a0f
--- /dev/null
+++ b/jacoco.sbt
@@ -0,0 +1,28 @@
+//
+// Copyright 2016 LinkedIn Corp.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+//
+
+import de.johoop.jacoco4sbt.JacocoPlugin._
+import de.johoop.jacoco4sbt._
+
+jacoco.settings
+
+parallelExecution in jacoco.Config := false
+
+jacoco.outputDirectory in jacoco.Config := file("target/jacoco")
+
+jacoco.reportFormats in jacoco.Config := Seq(XMLReport(encoding = "utf-8"), HTMLReport("utf-8"))
+
+jacoco.excludes in jacoco.Config := Seq("views*", "*Routes*", "controllers*routes*", "controllers*Reverse*", "controllers*javascript*", "controller*ref*")
\ No newline at end of file
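With jacoco4sbt wired in as above, coverage reports land under target/jacoco in the configured XML and HTML formats. Running coverage uses the plugin's standard task (the usual jacoco4sbt invocation, not anything defined in this patch):

sbt jacoco:cover

diff --git a/project/Dependencies.scala b/project/Dependencies.scala
new file mode 100644
index 000000000..462563ea2
--- /dev/null
+++ b/project/Dependencies.scala
@@ -0,0 +1,106 @@
+//
+// Copyright 2016 LinkedIn Corp.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.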
+//
+
+import play.Project._
+import sbt._
+
+object Dependencies {
+
+  // Dependency Version
+  lazy val commonsCodecVersion = "1.10"
+  lazy val commonsIoVersion = "2.4"
+  lazy val gsonVersion = "2.2.4"
+  lazy val guavaVersion = "18.0" // Hadoop uses Guava 11.0 by default, which might raise NoSuchMethodException
+  lazy val jacksonMapperAslVersion = "1.7.3"
+  lazy val jacksonVersion = "2.5.3"
+  lazy val jerseyVersion = "2.24"
+  lazy val jsoupVersion = "1.7.3"
+  lazy val mysqlConnectorVersion = "5.1.36"
+  lazy val oozieClientVersion = "4.2.0"
+
+  lazy val HADOOP_VERSION = "hadoopversion"
+  lazy val SPARK_VERSION = "sparkversion"
+
+  var hadoopVersion = "2.3.0"
+  if (System.getProperties.getProperty(HADOOP_VERSION) != null) {
+    hadoopVersion = System.getProperties.getProperty(HADOOP_VERSION)
+  }
+
+  var sparkVersion = "1.4.0"
+  if (System.getProperties.getProperty(SPARK_VERSION) != null) {
+    sparkVersion = System.getProperties.getProperty(SPARK_VERSION)
+  }
+
+  // Note: lexicographic String comparison; adequate for the single-digit 1.x minor versions used here
+  val sparkExclusion = if (sparkVersion >= "1.5.0") {
+    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
+      ExclusionRule(organization = "com.typesafe.akka"),
+      ExclusionRule(organization = "org.apache.avro"),
+      ExclusionRule(organization = "org.apache.hadoop"),
+      ExclusionRule(organization = "net.razorvine")
+    )
+  } else {
+    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
+      ExclusionRule(organization = "org.apache.avro"),
+      ExclusionRule(organization = "org.apache.hadoop"),
+      ExclusionRule(organization = "net.razorvine")
+    )
+  }
+
+  // Dependency coordinates
+  var requiredDep = Seq(
+    "com.google.code.gson" % "gson" % gsonVersion,
+    "com.google.guava" % "guava" % guavaVersion,
+    "com.jsuereth" %% "scala-arm" % "1.4",
+    "commons-codec" % "commons-codec" % commonsCodecVersion,
+    "commons-io" % "commons-io" % commonsIoVersion,
+    "javax.ws.rs" % "javax.ws.rs-api" % "2.0.1",
+    "mysql" % "mysql-connector-java" % mysqlConnectorVersion,
+    "org.apache.hadoop" % "hadoop-auth" % hadoopVersion % "compileonly",
+    "org.apache.hadoop" % "hadoop-mapreduce-client-core" % hadoopVersion % "compileonly",
+    "org.apache.hadoop" % "hadoop-mapreduce-client-core" % hadoopVersion % Test,
+    "org.apache.hadoop" % "hadoop-common" % hadoopVersion % "compileonly",
+    "org.apache.hadoop" % "hadoop-common" % hadoopVersion % Test,
+    "org.apache.hadoop" % "hadoop-hdfs" % hadoopVersion % "compileonly",
+    "org.apache.hadoop" % "hadoop-hdfs" % hadoopVersion % Test,
+    "org.jsoup" % "jsoup" % jsoupVersion,
+    "org.apache.oozie" % "oozie-client" % oozieClientVersion excludeAll(
+      ExclusionRule(organization = "org.apache.hadoop")
+    ),
+    "org.glassfish.jersey.core" % "jersey-client" % jerseyVersion,
+    "org.glassfish.jersey.core" % "jersey-common" % jerseyVersion,
+    "org.glassfish.jersey.media" % "jersey-media-json-jackson" % jerseyVersion % Test,
+    "org.glassfish.jersey.test-framework" % "jersey-test-framework-core" % jerseyVersion % Test,
+    "org.glassfish.jersey.test-framework.providers" % "jersey-test-framework-provider-grizzly2" % jerseyVersion % Test,
+    "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion,
+    "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion,
+    "io.dropwizard.metrics" % "metrics-core" % "3.1.2",
+    "io.dropwizard.metrics" % "metrics-healthchecks" % "3.1.2",
+    "org.mockito" % "mockito-core" % "1.10.19" exclude ("org.hamcrest", "hamcrest-core"),
+    "org.jmockit" % "jmockit" % "1.23" % Test,
+    "org.scala-lang.modules" %% "scala-async" % "0.9.5",
+    "org.apache.httpcomponents" % "httpclient" % "4.5.2",
"4.5.2", + "org.scalatest" %% "scalatest" % "3.0.0" % Test + + ) :+ sparkExclusion + + var dependencies = Seq(javaJdbc, javaEbean, cache) + dependencies ++= requiredDep + + val exclusionRules = Seq( + ExclusionRule(organization = "com.sun.jersey", name = "jersey-core"), + ExclusionRule(organization = "com.sun.jersey", name = "jersey-server") + ) +} diff --git a/project/build.properties b/project/build.properties index 0974fce44..bb96499e0 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1,17 @@ -sbt.version=0.13.0 +# +# Copyright 2016 LinkedIn Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +sbt.version=0.13.2 \ No newline at end of file diff --git a/project/plugins.sbt b/project/plugins.sbt index 53e581df0..211de6dae 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,3 +1,18 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. +*/ // Comment to get more information during initialization logLevel := Level.Warn @@ -5,4 +20,7 @@ logLevel := Level.Warn resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/" // Use the Play sbt plugin for Play projects -addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.2") +addSbtPlugin("com.typesafe.play" % "sbt-plugin" % Option(System.getProperty("play.version")).getOrElse("2.2.2")) + +// Jacoco code coverage plugin +addSbtPlugin("de.johoop" % "jacoco4sbt" % "2.1.6") diff --git a/public/assets/analytics/track.js b/public/assets/analytics/track.js new file mode 100644 index 000000000..e5e099f45 --- /dev/null +++ b/public/assets/analytics/track.js @@ -0,0 +1,6 @@ +/* + Paste the tracking javascript snippet into this file for web analytics. + This javascript file will be included into every page in the application. + See comments section above 'enable.analytics' in the application.conf file for + more information. +*/ \ No newline at end of file diff --git a/public/assets/bootstrap-datepicker/css/datepicker.css b/public/assets/bootstrap-datepicker/css/datepicker.css new file mode 100755 index 000000000..3dbdf4173 --- /dev/null +++ b/public/assets/bootstrap-datepicker/css/datepicker.css @@ -0,0 +1,472 @@ +/*! 
+ * Datepicker for Bootstrap v1.5.1 (https://github.com/eternicode/bootstrap-datepicker) + * + * Copyright 2012 Stefan Petre + * Improvements by Andrew Rowls + * Licensed under the Apache License v2.0 (http://www.apache.org/licenses/LICENSE-2.0) + */ +.datepicker { + padding: 4px; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + direction: ltr; +} +.datepicker-inline { + width: 220px; +} +.datepicker.datepicker-rtl { + direction: rtl; +} +.datepicker.datepicker-rtl table tr td span { + float: right; +} +.datepicker-dropdown { + top: 0; + left: 0; +} +.datepicker-dropdown:before { + content: ''; + display: inline-block; + border-left: 7px solid transparent; + border-right: 7px solid transparent; + border-bottom: 7px solid #999999; + border-top: 0; + border-bottom-color: rgba(0, 0, 0, 0.2); + position: absolute; +} +.datepicker-dropdown:after { + content: ''; + display: inline-block; + border-left: 6px solid transparent; + border-right: 6px solid transparent; + border-bottom: 6px solid #ffffff; + border-top: 0; + position: absolute; +} +.datepicker-dropdown.datepicker-orient-left:before { + left: 6px; +} +.datepicker-dropdown.datepicker-orient-left:after { + left: 7px; +} +.datepicker-dropdown.datepicker-orient-right:before { + right: 6px; +} +.datepicker-dropdown.datepicker-orient-right:after { + right: 7px; +} +.datepicker-dropdown.datepicker-orient-bottom:before { + top: -7px; +} +.datepicker-dropdown.datepicker-orient-bottom:after { + top: -6px; +} +.datepicker-dropdown.datepicker-orient-top:before { + bottom: -7px; + border-bottom: 0; + border-top: 7px solid #999999; +} +.datepicker-dropdown.datepicker-orient-top:after { + bottom: -6px; + border-bottom: 0; + border-top: 6px solid #ffffff; +} +.datepicker > div { + display: none; +} +.datepicker table { + margin: 0; + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} +.datepicker td, +.datepicker th { + text-align: center; + width: 20px; + height: 20px; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + border: none; +} +.table-striped .datepicker table tr td, +.table-striped .datepicker table tr th { + background-color: transparent; +} +.datepicker table tr td.day:hover, +.datepicker table tr td.day.focused { + background: #eeeeee; + cursor: pointer; +} +.datepicker table tr td.old, +.datepicker table tr td.new { + color: #999999; +} +.datepicker table tr td.disabled, +.datepicker table tr td.disabled:hover { + background: none; + color: #999999; + cursor: default; +} +.datepicker table tr td.highlighted { + background: #d9edf7; + border-radius: 0; +} +.datepicker table tr td.today, +.datepicker table tr td.today:hover, +.datepicker table tr td.today.disabled, +.datepicker table tr td.today.disabled:hover { + background-color: #fde19a; + background-image: -moz-linear-gradient(to bottom, #fdd49a, #fdf59a); + background-image: -ms-linear-gradient(to bottom, #fdd49a, #fdf59a); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#fdd49a), to(#fdf59a)); + background-image: -webkit-linear-gradient(to bottom, #fdd49a, #fdf59a); + background-image: -o-linear-gradient(to bottom, #fdd49a, #fdf59a); + background-image: linear-gradient(to bottom, #fdd49a, #fdf59a); + background-repeat: repeat-x; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fdd49a', endColorstr='#fdf59a', GradientType=0); + border-color: #fdf59a #fdf59a #fbed50; + border-color: 
rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); + filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); + color: #000; +} +.datepicker table tr td.today:hover, +.datepicker table tr td.today:hover:hover, +.datepicker table tr td.today.disabled:hover, +.datepicker table tr td.today.disabled:hover:hover, +.datepicker table tr td.today:active, +.datepicker table tr td.today:hover:active, +.datepicker table tr td.today.disabled:active, +.datepicker table tr td.today.disabled:hover:active, +.datepicker table tr td.today.active, +.datepicker table tr td.today:hover.active, +.datepicker table tr td.today.disabled.active, +.datepicker table tr td.today.disabled:hover.active, +.datepicker table tr td.today.disabled, +.datepicker table tr td.today:hover.disabled, +.datepicker table tr td.today.disabled.disabled, +.datepicker table tr td.today.disabled:hover.disabled, +.datepicker table tr td.today[disabled], +.datepicker table tr td.today:hover[disabled], +.datepicker table tr td.today.disabled[disabled], +.datepicker table tr td.today.disabled:hover[disabled] { + background-color: #fdf59a; +} +.datepicker table tr td.today:active, +.datepicker table tr td.today:hover:active, +.datepicker table tr td.today.disabled:active, +.datepicker table tr td.today.disabled:hover:active, +.datepicker table tr td.today.active, +.datepicker table tr td.today:hover.active, +.datepicker table tr td.today.disabled.active, +.datepicker table tr td.today.disabled:hover.active { + background-color: #fbf069 \9; +} +.datepicker table tr td.today:hover:hover { + color: #000; +} +.datepicker table tr td.today.active:hover { + color: #fff; +} +.datepicker table tr td.range, +.datepicker table tr td.range:hover, +.datepicker table tr td.range.disabled, +.datepicker table tr td.range.disabled:hover { + background: #eeeeee; + -webkit-border-radius: 0; + -moz-border-radius: 0; + border-radius: 0; +} +.datepicker table tr td.range.today, +.datepicker table tr td.range.today:hover, +.datepicker table tr td.range.today.disabled, +.datepicker table tr td.range.today.disabled:hover { + background-color: #f3d17a; + background-image: -moz-linear-gradient(to bottom, #f3c17a, #f3e97a); + background-image: -ms-linear-gradient(to bottom, #f3c17a, #f3e97a); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#f3c17a), to(#f3e97a)); + background-image: -webkit-linear-gradient(to bottom, #f3c17a, #f3e97a); + background-image: -o-linear-gradient(to bottom, #f3c17a, #f3e97a); + background-image: linear-gradient(to bottom, #f3c17a, #f3e97a); + background-repeat: repeat-x; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#f3c17a', endColorstr='#f3e97a', GradientType=0); + border-color: #f3e97a #f3e97a #edde34; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); + filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); + -webkit-border-radius: 0; + -moz-border-radius: 0; + border-radius: 0; +} +.datepicker table tr td.range.today:hover, +.datepicker table tr td.range.today:hover:hover, +.datepicker table tr td.range.today.disabled:hover, +.datepicker table tr td.range.today.disabled:hover:hover, +.datepicker table tr td.range.today:active, +.datepicker table tr td.range.today:hover:active, +.datepicker table tr td.range.today.disabled:active, +.datepicker table tr td.range.today.disabled:hover:active, +.datepicker table tr td.range.today.active, +.datepicker table tr td.range.today:hover.active, +.datepicker table tr td.range.today.disabled.active, 
+.datepicker table tr td.range.today.disabled:hover.active, +.datepicker table tr td.range.today.disabled, +.datepicker table tr td.range.today:hover.disabled, +.datepicker table tr td.range.today.disabled.disabled, +.datepicker table tr td.range.today.disabled:hover.disabled, +.datepicker table tr td.range.today[disabled], +.datepicker table tr td.range.today:hover[disabled], +.datepicker table tr td.range.today.disabled[disabled], +.datepicker table tr td.range.today.disabled:hover[disabled] { + background-color: #f3e97a; +} +.datepicker table tr td.range.today:active, +.datepicker table tr td.range.today:hover:active, +.datepicker table tr td.range.today.disabled:active, +.datepicker table tr td.range.today.disabled:hover:active, +.datepicker table tr td.range.today.active, +.datepicker table tr td.range.today:hover.active, +.datepicker table tr td.range.today.disabled.active, +.datepicker table tr td.range.today.disabled:hover.active { + background-color: #efe24b \9; +} +.datepicker table tr td.selected, +.datepicker table tr td.selected:hover, +.datepicker table tr td.selected.disabled, +.datepicker table tr td.selected.disabled:hover { + background-color: #9e9e9e; + background-image: -moz-linear-gradient(to bottom, #b3b3b3, #808080); + background-image: -ms-linear-gradient(to bottom, #b3b3b3, #808080); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#b3b3b3), to(#808080)); + background-image: -webkit-linear-gradient(to bottom, #b3b3b3, #808080); + background-image: -o-linear-gradient(to bottom, #b3b3b3, #808080); + background-image: linear-gradient(to bottom, #b3b3b3, #808080); + background-repeat: repeat-x; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#b3b3b3', endColorstr='#808080', GradientType=0); + border-color: #808080 #808080 #595959; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); + filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); + color: #fff; + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); +} +.datepicker table tr td.selected:hover, +.datepicker table tr td.selected:hover:hover, +.datepicker table tr td.selected.disabled:hover, +.datepicker table tr td.selected.disabled:hover:hover, +.datepicker table tr td.selected:active, +.datepicker table tr td.selected:hover:active, +.datepicker table tr td.selected.disabled:active, +.datepicker table tr td.selected.disabled:hover:active, +.datepicker table tr td.selected.active, +.datepicker table tr td.selected:hover.active, +.datepicker table tr td.selected.disabled.active, +.datepicker table tr td.selected.disabled:hover.active, +.datepicker table tr td.selected.disabled, +.datepicker table tr td.selected:hover.disabled, +.datepicker table tr td.selected.disabled.disabled, +.datepicker table tr td.selected.disabled:hover.disabled, +.datepicker table tr td.selected[disabled], +.datepicker table tr td.selected:hover[disabled], +.datepicker table tr td.selected.disabled[disabled], +.datepicker table tr td.selected.disabled:hover[disabled] { + background-color: #808080; +} +.datepicker table tr td.selected:active, +.datepicker table tr td.selected:hover:active, +.datepicker table tr td.selected.disabled:active, +.datepicker table tr td.selected.disabled:hover:active, +.datepicker table tr td.selected.active, +.datepicker table tr td.selected:hover.active, +.datepicker table tr td.selected.disabled.active, +.datepicker table tr td.selected.disabled:hover.active { + background-color: #666666 \9; +} +.datepicker table tr td.active, +.datepicker table tr 
td.active:hover, +.datepicker table tr td.active.disabled, +.datepicker table tr td.active.disabled:hover { + background-color: #006dcc; + background-image: -moz-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: -ms-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0044cc)); + background-image: -webkit-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: -o-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: linear-gradient(to bottom, #0088cc, #0044cc); + background-repeat: repeat-x; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#0088cc', endColorstr='#0044cc', GradientType=0); + border-color: #0044cc #0044cc #002a80; + border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); + filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); + color: #fff; + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); +} +.datepicker table tr td.active:hover, +.datepicker table tr td.active:hover:hover, +.datepicker table tr td.active.disabled:hover, +.datepicker table tr td.active.disabled:hover:hover, +.datepicker table tr td.active:active, +.datepicker table tr td.active:hover:active, +.datepicker table tr td.active.disabled:active, +.datepicker table tr td.active.disabled:hover:active, +.datepicker table tr td.active.active, +.datepicker table tr td.active:hover.active, +.datepicker table tr td.active.disabled.active, +.datepicker table tr td.active.disabled:hover.active, +.datepicker table tr td.active.disabled, +.datepicker table tr td.active:hover.disabled, +.datepicker table tr td.active.disabled.disabled, +.datepicker table tr td.active.disabled:hover.disabled, +.datepicker table tr td.active[disabled], +.datepicker table tr td.active:hover[disabled], +.datepicker table tr td.active.disabled[disabled], +.datepicker table tr td.active.disabled:hover[disabled] { + background-color: #0044cc; +} +.datepicker table tr td.active:active, +.datepicker table tr td.active:hover:active, +.datepicker table tr td.active.disabled:active, +.datepicker table tr td.active.disabled:hover:active, +.datepicker table tr td.active.active, +.datepicker table tr td.active:hover.active, +.datepicker table tr td.active.disabled.active, +.datepicker table tr td.active.disabled:hover.active { + background-color: #003399 \9; +} +.datepicker table tr td span { + display: block; + width: 23%; + height: 54px; + line-height: 54px; + float: left; + margin: 1%; + cursor: pointer; + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; +} +.datepicker table tr td span:hover { + background: #eeeeee; +} +.datepicker table tr td span.disabled, +.datepicker table tr td span.disabled:hover { + background: none; + color: #999999; + cursor: default; +} +.datepicker table tr td span.active, +.datepicker table tr td span.active:hover, +.datepicker table tr td span.active.disabled, +.datepicker table tr td span.active.disabled:hover { + background-color: #006dcc; + background-image: -moz-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: -ms-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0044cc)); + background-image: -webkit-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: -o-linear-gradient(to bottom, #0088cc, #0044cc); + background-image: linear-gradient(to bottom, #0088cc, #0044cc); + background-repeat: repeat-x; + filter: 
progid:DXImageTransform.Microsoft.gradient(startColorstr='#0088cc', endColorstr='#0044cc', GradientType=0);
+  border-color: #0044cc #0044cc #002a80;
+  border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);
+  filter: progid:DXImageTransform.Microsoft.gradient(enabled=false);
+  color: #fff;
+  text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25);
+}
+.datepicker table tr td span.active:hover,
+.datepicker table tr td span.active:hover:hover,
+.datepicker table tr td span.active.disabled:hover,
+.datepicker table tr td span.active.disabled:hover:hover,
+.datepicker table tr td span.active:active,
+.datepicker table tr td span.active:hover:active,
+.datepicker table tr td span.active.disabled:active,
+.datepicker table tr td span.active.disabled:hover:active,
+.datepicker table tr td span.active.active,
+.datepicker table tr td span.active:hover.active,
+.datepicker table tr td span.active.disabled.active,
+.datepicker table tr td span.active.disabled:hover.active,
+.datepicker table tr td span.active.disabled,
+.datepicker table tr td span.active:hover.disabled,
+.datepicker table tr td span.active.disabled.disabled,
+.datepicker table tr td span.active.disabled:hover.disabled,
+.datepicker table tr td span.active[disabled],
+.datepicker table tr td span.active:hover[disabled],
+.datepicker table tr td span.active.disabled[disabled],
+.datepicker table tr td span.active.disabled:hover[disabled] {
+  background-color: #0044cc;
+}
+.datepicker table tr td span.active:active,
+.datepicker table tr td span.active:hover:active,
+.datepicker table tr td span.active.disabled:active,
+.datepicker table tr td span.active.disabled:hover:active,
+.datepicker table tr td span.active.active,
+.datepicker table tr td span.active:hover.active,
+.datepicker table tr td span.active.disabled.active,
+.datepicker table tr td span.active.disabled:hover.active {
+  background-color: #003399 \9;
+}
+.datepicker table tr td span.old,
+.datepicker table tr td span.new {
+  color: #999999;
+}
+.datepicker .datepicker-switch {
+  width: 145px;
+}
+.datepicker .datepicker-switch,
+.datepicker .prev,
+.datepicker .next,
+.datepicker tfoot tr th {
+  cursor: pointer;
+}
+.datepicker .datepicker-switch:hover,
+.datepicker .prev:hover,
+.datepicker .next:hover,
+.datepicker tfoot tr th:hover {
+  background: #eeeeee;
+}
+.datepicker .cw {
+  font-size: 10px;
+  width: 12px;
+  padding: 0 2px 0 5px;
+  vertical-align: middle;
+}
+.input-append.date .add-on,
+.input-prepend.date .add-on {
+  cursor: pointer;
+}
+.input-append.date .add-on i,
+.input-prepend.date .add-on i {
+  margin-top: 3px;
+}
+.input-daterange input {
+  text-align: center;
+}
+.input-daterange input:first-child {
+  -webkit-border-radius: 3px 0 0 3px;
+  -moz-border-radius: 3px 0 0 3px;
+  border-radius: 3px 0 0 3px;
+}
+.input-daterange input:last-child {
+  -webkit-border-radius: 0 3px 3px 0;
+  -moz-border-radius: 0 3px 3px 0;
+  border-radius: 0 3px 3px 0;
+}
+.input-daterange .add-on {
+  display: inline-block;
+  width: auto;
+  min-width: 16px;
+  height: 18px;
+  padding: 4px 5px;
+  font-weight: normal;
+  line-height: 18px;
+  text-align: center;
+  text-shadow: 0 1px 0 #ffffff;
+  vertical-align: middle;
+  background-color: #eeeeee;
+  border: 1px solid #ccc;
+  margin-left: -5px;
+  margin-right: -5px;
+}
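The stylesheet above and the script that follows vendor the stock bootstrap-datepicker widget used by the new date-range search UI. The application-side wiring is not part of this hunk; a minimal, hypothetical initialization for an input field would look like this (the selector is illustrative; format, autoclose and todayHighlight are options handled by the vendored widget below):

// Hypothetical wiring, not from this patch.
$('#search-start-date').datepicker({
  format: 'mm/dd/yyyy',
  autoclose: true,
  todayHighlight: true
});

diff --git a/public/assets/bootstrap-datepicker/js/bootstrap-datepicker.js b/public/assets/bootstrap-datepicker/js/bootstrap-datepicker.js
new file mode 100755
index 000000000..a1647da5e
--- /dev/null
+++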
b/public/assets/bootstrap-datepicker/js/bootstrap-datepicker.js @@ -0,0 +1,1918 @@ +/*! + * Datepicker for Bootstrap v1.5.1 (https://github.com/eternicode/bootstrap-datepicker) + * + * Copyright 2012 Stefan Petre + * Improvements by Andrew Rowls + * Licensed under the Apache License v2.0 (http://www.apache.org/licenses/LICENSE-2.0) + */(function(factory){ + if (typeof define === "function" && define.amd) { + define(["jquery"], factory); + } else if (typeof exports === 'object') { + factory(require('jquery')); + } else { + factory(jQuery); + } +}(function($, undefined){ + + function UTCDate(){ + return new Date(Date.UTC.apply(Date, arguments)); + } + function UTCToday(){ + var today = new Date(); + return UTCDate(today.getFullYear(), today.getMonth(), today.getDate()); + } + function isUTCEquals(date1, date2) { + return ( + date1.getUTCFullYear() === date2.getUTCFullYear() && + date1.getUTCMonth() === date2.getUTCMonth() && + date1.getUTCDate() === date2.getUTCDate() + ); + } + function alias(method){ + return function(){ + return this[method].apply(this, arguments); + }; + } + function isValidDate(d) { + return d && !isNaN(d.getTime()); + } + + var DateArray = (function(){ + var extras = { + get: function(i){ + return this.slice(i)[0]; + }, + contains: function(d){ + // Array.indexOf is not cross-browser; + // $.inArray doesn't work with Dates + var val = d && d.valueOf(); + for (var i=0, l=this.length; i < l; i++) + if (this[i].valueOf() === val) + return i; + return -1; + }, + remove: function(i){ + this.splice(i,1); + }, + replace: function(new_array){ + if (!new_array) + return; + if (!$.isArray(new_array)) + new_array = [new_array]; + this.clear(); + this.push.apply(this, new_array); + }, + clear: function(){ + this.length = 0; + }, + copy: function(){ + var a = new DateArray(); + a.replace(this); + return a; + } + }; + + return function(){ + var a = []; + a.push.apply(a, arguments); + $.extend(a, extras); + return a; + }; + })(); + + + // Picker object + + var Datepicker = function(element, options){ + $(element).data('datepicker', this); + this._process_options(options); + + this.dates = new DateArray(); + this.viewDate = this.o.defaultViewDate; + this.focusDate = null; + + this.element = $(element); + this.isInline = false; + this.isInput = this.element.is('input'); + this.component = this.element.hasClass('date') ? 
this.element.find('.add-on, .input-group-addon, .btn') : false; + this.hasInput = this.component && this.element.find('input').length; + if (this.component && this.component.length === 0) + this.component = false; + + this.picker = $(DPGlobal.template); + this._buildEvents(); + this._attachEvents(); + + if (this.isInline){ + this.picker.addClass('datepicker-inline').appendTo(this.element); + } + else { + this.picker.addClass('datepicker-dropdown dropdown-menu'); + } + + if (this.o.rtl){ + this.picker.addClass('datepicker-rtl'); + } + + this.viewMode = this.o.startView; + + if (this.o.calendarWeeks) + this.picker.find('thead .datepicker-title, tfoot .today, tfoot .clear') + .attr('colspan', function(i, val){ + return parseInt(val) + 1; + }); + + this._allow_update = false; + + this.setStartDate(this._o.startDate); + this.setEndDate(this._o.endDate); + this.setDaysOfWeekDisabled(this.o.daysOfWeekDisabled); + this.setDaysOfWeekHighlighted(this.o.daysOfWeekHighlighted); + this.setDatesDisabled(this.o.datesDisabled); + + this.fillDow(); + this.fillMonths(); + + this._allow_update = true; + + this.update(); + this.showMode(); + + if (this.isInline){ + this.show(); + } + }; + + Datepicker.prototype = { + constructor: Datepicker, + + _process_options: function(opts){ + // Store raw options for reference + this._o = $.extend({}, this._o, opts); + // Processed options + var o = this.o = $.extend({}, this._o); + + // Check if "de-DE" style date is available, if not language should + // fallback to 2 letter code eg "de" + var lang = o.language; + if (!dates[lang]){ + lang = lang.split('-')[0]; + if (!dates[lang]) + lang = defaults.language; + } + o.language = lang; + + switch (o.startView){ + case 2: + case 'decade': + o.startView = 2; + break; + case 1: + case 'year': + o.startView = 1; + break; + default: + o.startView = 0; + } + + switch (o.minViewMode){ + case 1: + case 'months': + o.minViewMode = 1; + break; + case 2: + case 'years': + o.minViewMode = 2; + break; + default: + o.minViewMode = 0; + } + + switch (o.maxViewMode) { + case 0: + case 'days': + o.maxViewMode = 0; + break; + case 1: + case 'months': + o.maxViewMode = 1; + break; + default: + o.maxViewMode = 2; + } + + o.startView = Math.min(o.startView, o.maxViewMode); + o.startView = Math.max(o.startView, o.minViewMode); + + // true, false, or Number > 0 + if (o.multidate !== true){ + o.multidate = Number(o.multidate) || false; + if (o.multidate !== false) + o.multidate = Math.max(0, o.multidate); + } + o.multidateSeparator = String(o.multidateSeparator); + + o.weekStart %= 7; + o.weekEnd = (o.weekStart + 6) % 7; + + var format = DPGlobal.parseFormat(o.format); + if (o.startDate !== -Infinity){ + if (!!o.startDate){ + if (o.startDate instanceof Date) + o.startDate = this._local_to_utc(this._zero_time(o.startDate)); + else + o.startDate = DPGlobal.parseDate(o.startDate, format, o.language); + } + else { + o.startDate = -Infinity; + } + } + if (o.endDate !== Infinity){ + if (!!o.endDate){ + if (o.endDate instanceof Date) + o.endDate = this._local_to_utc(this._zero_time(o.endDate)); + else + o.endDate = DPGlobal.parseDate(o.endDate, format, o.language); + } + else { + o.endDate = Infinity; + } + } + + o.daysOfWeekDisabled = o.daysOfWeekDisabled||[]; + if (!$.isArray(o.daysOfWeekDisabled)) + o.daysOfWeekDisabled = o.daysOfWeekDisabled.split(/[,\s]*/); + o.daysOfWeekDisabled = $.map(o.daysOfWeekDisabled, function(d){ + return parseInt(d, 10); + }); + + o.daysOfWeekHighlighted = o.daysOfWeekHighlighted||[]; + if 
(!$.isArray(o.daysOfWeekHighlighted)) + o.daysOfWeekHighlighted = o.daysOfWeekHighlighted.split(/[,\s]*/); + o.daysOfWeekHighlighted = $.map(o.daysOfWeekHighlighted, function(d){ + return parseInt(d, 10); + }); + + o.datesDisabled = o.datesDisabled||[]; + if (!$.isArray(o.datesDisabled)) { + var datesDisabled = []; + datesDisabled.push(DPGlobal.parseDate(o.datesDisabled, format, o.language)); + o.datesDisabled = datesDisabled; + } + o.datesDisabled = $.map(o.datesDisabled,function(d){ + return DPGlobal.parseDate(d, format, o.language); + }); + + var plc = String(o.orientation).toLowerCase().split(/\s+/g), + _plc = o.orientation.toLowerCase(); + plc = $.grep(plc, function(word){ + return /^auto|left|right|top|bottom$/.test(word); + }); + o.orientation = {x: 'auto', y: 'auto'}; + if (!_plc || _plc === 'auto') + ; // no action + else if (plc.length === 1){ + switch (plc[0]){ + case 'top': + case 'bottom': + o.orientation.y = plc[0]; + break; + case 'left': + case 'right': + o.orientation.x = plc[0]; + break; + } + } + else { + _plc = $.grep(plc, function(word){ + return /^left|right$/.test(word); + }); + o.orientation.x = _plc[0] || 'auto'; + + _plc = $.grep(plc, function(word){ + return /^top|bottom$/.test(word); + }); + o.orientation.y = _plc[0] || 'auto'; + } + if (o.defaultViewDate) { + var year = o.defaultViewDate.year || new Date().getFullYear(); + var month = o.defaultViewDate.month || 0; + var day = o.defaultViewDate.day || 1; + o.defaultViewDate = UTCDate(year, month, day); + } else { + o.defaultViewDate = UTCToday(); + } + }, + _events: [], + _secondaryEvents: [], + _applyEvents: function(evs){ + for (var i=0, el, ch, ev; i < evs.length; i++){ + el = evs[i][0]; + if (evs[i].length === 2){ + ch = undefined; + ev = evs[i][1]; + } + else if (evs[i].length === 3){ + ch = evs[i][1]; + ev = evs[i][2]; + } + el.on(ev, ch); + } + }, + _unapplyEvents: function(evs){ + for (var i=0, el, ev, ch; i < evs.length; i++){ + el = evs[i][0]; + if (evs[i].length === 2){ + ch = undefined; + ev = evs[i][1]; + } + else if (evs[i].length === 3){ + ch = evs[i][1]; + ev = evs[i][2]; + } + el.off(ev, ch); + } + }, + _buildEvents: function(){ + var events = { + keyup: $.proxy(function(e){ + if ($.inArray(e.keyCode, [27, 37, 39, 38, 40, 32, 13, 9]) === -1) + this.update(); + }, this), + keydown: $.proxy(this.keydown, this), + paste: $.proxy(this.paste, this) + }; + + if (this.o.showOnFocus === true) { + events.focus = $.proxy(this.show, this); + } + + if (this.isInput) { // single input + this._events = [ + [this.element, events] + ]; + } + else if (this.component && this.hasInput) { // component: input + button + this._events = [ + // For components that are not readonly, allow keyboard nav + [this.element.find('input'), events], + [this.component, { + click: $.proxy(this.show, this) + }] + ]; + } + else if (this.element.is('div')){ // inline datepicker + this.isInline = true; + } + else { + this._events = [ + [this.element, { + click: $.proxy(this.show, this) + }] + ]; + } + this._events.push( + // Component: listen for blur on element descendants + [this.element, '*', { + blur: $.proxy(function(e){ + this._focused_from = e.target; + }, this) + }], + // Input: listen for blur on element + [this.element, { + blur: $.proxy(function(e){ + this._focused_from = e.target; + }, this) + }] + ); + + if (this.o.immediateUpdates) { + // Trigger input updates immediately on changed year/month + this._events.push([this.element, { + 'changeYear changeMonth': $.proxy(function(e){ + this.update(e.date); + }, this) + }]); + 
} + + this._secondaryEvents = [ + [this.picker, { + click: $.proxy(this.click, this) + }], + [$(window), { + resize: $.proxy(this.place, this) + }], + [$(document), { + mousedown: $.proxy(function(e){ + // Clicked outside the datepicker, hide it + if (!( + this.element.is(e.target) || + this.element.find(e.target).length || + this.picker.is(e.target) || + this.picker.find(e.target).length || + this.picker.hasClass('datepicker-inline') + )){ + this.hide(); + } + }, this) + }] + ]; + }, + _attachEvents: function(){ + this._detachEvents(); + this._applyEvents(this._events); + }, + _detachEvents: function(){ + this._unapplyEvents(this._events); + }, + _attachSecondaryEvents: function(){ + this._detachSecondaryEvents(); + this._applyEvents(this._secondaryEvents); + }, + _detachSecondaryEvents: function(){ + this._unapplyEvents(this._secondaryEvents); + }, + _trigger: function(event, altdate){ + var date = altdate || this.dates.get(-1), + local_date = this._utc_to_local(date); + + this.element.trigger({ + type: event, + date: local_date, + dates: $.map(this.dates, this._utc_to_local), + format: $.proxy(function(ix, format){ + if (arguments.length === 0){ + ix = this.dates.length - 1; + format = this.o.format; + } + else if (typeof ix === 'string'){ + format = ix; + ix = this.dates.length - 1; + } + format = format || this.o.format; + var date = this.dates.get(ix); + return DPGlobal.formatDate(date, format, this.o.language); + }, this) + }); + }, + + show: function(){ + var element = this.component ? this.element.find('input') : this.element; + if (element.attr('readonly') && this.o.enableOnReadonly === false) + return; + if (!this.isInline) + this.picker.appendTo(this.o.container); + this.place(); + this.picker.show(); + this._attachSecondaryEvents(); + this._trigger('show'); + if ((window.navigator.msMaxTouchPoints || 'ontouchstart' in document) && this.o.disableTouchKeyboard) { + $(this.element).blur(); + } + return this; + }, + + hide: function(){ + if (this.isInline) + return this; + if (!this.picker.is(':visible')) + return this; + this.focusDate = null; + this.picker.hide().detach(); + this._detachSecondaryEvents(); + this.viewMode = this.o.startView; + this.showMode(); + + if ( + this.o.forceParse && + ( + this.isInput && this.element.val() || + this.hasInput && this.element.find('input').val() + ) + ) + this.setValue(); + this._trigger('hide'); + return this; + }, + + remove: function(){ + this.hide(); + this._detachEvents(); + this._detachSecondaryEvents(); + this.picker.remove(); + delete this.element.data().datepicker; + if (!this.isInput){ + delete this.element.data().date; + } + return this; + }, + + paste: function(evt){ + var dateString; + if (evt.originalEvent.clipboardData && evt.originalEvent.clipboardData.types + && $.inArray('text/plain', evt.originalEvent.clipboardData.types) !== -1) { + dateString = evt.originalEvent.clipboardData.getData('text/plain'); + } + else if (window.clipboardData) { + dateString = window.clipboardData.getData('Text'); + } + else { + return; + } + this.setDate(dateString); + this.update(); + evt.preventDefault(); + }, + + _utc_to_local: function(utc){ + return utc && new Date(utc.getTime() + (utc.getTimezoneOffset()*60000)); + }, + _local_to_utc: function(local){ + return local && new Date(local.getTime() - (local.getTimezoneOffset()*60000)); + }, + _zero_time: function(local){ + return local && new Date(local.getFullYear(), local.getMonth(), local.getDate()); + }, + _zero_utc_time: function(utc){ + return utc && new 
Date(Date.UTC(utc.getUTCFullYear(), utc.getUTCMonth(), utc.getUTCDate())); + }, + + getDates: function(){ + return $.map(this.dates, this._utc_to_local); + }, + + getUTCDates: function(){ + return $.map(this.dates, function(d){ + return new Date(d); + }); + }, + + getDate: function(){ + return this._utc_to_local(this.getUTCDate()); + }, + + getUTCDate: function(){ + var selected_date = this.dates.get(-1); + if (typeof selected_date !== 'undefined') { + return new Date(selected_date); + } else { + return null; + } + }, + + clearDates: function(){ + var element; + if (this.isInput) { + element = this.element; + } else if (this.component) { + element = this.element.find('input'); + } + + if (element) { + element.val(''); + } + + this.update(); + this._trigger('changeDate'); + + if (this.o.autoclose) { + this.hide(); + } + }, + setDates: function(){ + var args = $.isArray(arguments[0]) ? arguments[0] : arguments; + this.update.apply(this, args); + this._trigger('changeDate'); + this.setValue(); + return this; + }, + + setUTCDates: function(){ + var args = $.isArray(arguments[0]) ? arguments[0] : arguments; + this.update.apply(this, $.map(args, this._utc_to_local)); + this._trigger('changeDate'); + this.setValue(); + return this; + }, + + setDate: alias('setDates'), + setUTCDate: alias('setUTCDates'), + + setValue: function(){ + var formatted = this.getFormattedDate(); + if (!this.isInput){ + if (this.component){ + this.element.find('input').val(formatted); + } + } + else { + this.element.val(formatted); + } + return this; + }, + + getFormattedDate: function(format){ + if (format === undefined) + format = this.o.format; + + var lang = this.o.language; + return $.map(this.dates, function(d){ + return DPGlobal.formatDate(d, format, lang); + }).join(this.o.multidateSeparator); + }, + + setStartDate: function(startDate){ + this._process_options({startDate: startDate}); + this.update(); + this.updateNavArrows(); + return this; + }, + + setEndDate: function(endDate){ + this._process_options({endDate: endDate}); + this.update(); + this.updateNavArrows(); + return this; + }, + + setDaysOfWeekDisabled: function(daysOfWeekDisabled){ + this._process_options({daysOfWeekDisabled: daysOfWeekDisabled}); + this.update(); + this.updateNavArrows(); + return this; + }, + + setDaysOfWeekHighlighted: function(daysOfWeekHighlighted){ + this._process_options({daysOfWeekHighlighted: daysOfWeekHighlighted}); + this.update(); + return this; + }, + + setDatesDisabled: function(datesDisabled){ + this._process_options({datesDisabled: datesDisabled}); + this.update(); + this.updateNavArrows(); + }, + + place: function(){ + if (this.isInline) + return this; + var calendarWidth = this.picker.outerWidth(), + calendarHeight = this.picker.outerHeight(), + visualPadding = 10, + container = $(this.o.container), + windowWidth = container.width(), + scrollTop = this.o.container === 'body' ? $(document).scrollTop() : container.scrollTop(), + appendOffset = container.offset(); + + var parentsZindex = []; + this.element.parents().each(function(){ + var itemZIndex = $(this).css('z-index'); + if (itemZIndex !== 'auto' && itemZIndex !== 0) parentsZindex.push(parseInt(itemZIndex)); + }); + var zIndex = Math.max.apply(Math, parentsZindex) + this.o.zIndexOffset; + var offset = this.component ? this.component.parent().offset() : this.element.offset(); + var height = this.component ? this.component.outerHeight(true) : this.element.outerHeight(false); + var width = this.component ? 
this.component.outerWidth(true) : this.element.outerWidth(false);
+			var left = offset.left - appendOffset.left,
+				top = offset.top - appendOffset.top;
+
+			if (this.o.container !== 'body') {
+				top += scrollTop;
+			}
+
+			this.picker.removeClass(
+				'datepicker-orient-top datepicker-orient-bottom '+
+				'datepicker-orient-right datepicker-orient-left'
+			);
+
+			if (this.o.orientation.x !== 'auto'){
+				this.picker.addClass('datepicker-orient-' + this.o.orientation.x);
+				if (this.o.orientation.x === 'right')
+					left -= calendarWidth - width;
+			}
+			// auto x orientation is best-placement: if it crosses a window
+			// edge, fudge it sideways
+			else {
+				if (offset.left < 0) {
+					// component is outside the window on the left side. Move it into visible range
+					this.picker.addClass('datepicker-orient-left');
+					left -= offset.left - visualPadding;
+				} else if (left + calendarWidth > windowWidth) {
+					// the calendar passes the window right edge. Align it to component right side
+					this.picker.addClass('datepicker-orient-right');
+					left += width - calendarWidth;
+				} else {
+					// Default to left
+					this.picker.addClass('datepicker-orient-left');
+				}
+			}
+
+			// auto y orientation is best-situation: top or bottom, no fudging,
+			// decision based on which shows more of the calendar
+			var yorient = this.o.orientation.y,
+				top_overflow;
+			if (yorient === 'auto'){
+				top_overflow = -scrollTop + top - calendarHeight;
+				yorient = top_overflow < 0 ? 'bottom' : 'top';
+			}
+
+			this.picker.addClass('datepicker-orient-' + yorient);
+			if (yorient === 'top')
+				top -= calendarHeight + parseInt(this.picker.css('padding-top'));
+			else
+				top += height;
+
+			if (this.o.rtl) {
+				var right = windowWidth - (left + width);
+				this.picker.css({
+					top: top,
+					right: right,
+					zIndex: zIndex
+				});
+			} else {
+				this.picker.css({
+					top: top,
+					left: left,
+					zIndex: zIndex
+				});
+			}
+			return this;
+		},
+
+		_allow_update: true,
+		update: function(){
+			if (!this._allow_update)
+				return this;
+
+			var oldDates = this.dates.copy(),
+				dates = [],
+				fromArgs = false;
+			if (arguments.length){
+				$.each(arguments, $.proxy(function(i, date){
+					if (date instanceof Date)
+						date = this._local_to_utc(date);
+					dates.push(date);
+				}, this));
+				fromArgs = true;
+			}
+			else {
+				dates = this.isInput
+						?
this.element.val() + : this.element.data('date') || this.element.find('input').val(); + if (dates && this.o.multidate) + dates = dates.split(this.o.multidateSeparator); + else + dates = [dates]; + delete this.element.data().date; + } + + dates = $.map(dates, $.proxy(function(date){ + return DPGlobal.parseDate(date, this.o.format, this.o.language); + }, this)); + dates = $.grep(dates, $.proxy(function(date){ + return ( + !this.dateWithinRange(date) || + !date + ); + }, this), true); + this.dates.replace(dates); + + if (this.dates.length) + this.viewDate = new Date(this.dates.get(-1)); + else if (this.viewDate < this.o.startDate) + this.viewDate = new Date(this.o.startDate); + else if (this.viewDate > this.o.endDate) + this.viewDate = new Date(this.o.endDate); + else + this.viewDate = this.o.defaultViewDate; + + if (fromArgs){ + // setting date by clicking + this.setValue(); + } + else if (dates.length){ + // setting date by typing + if (String(oldDates) !== String(this.dates)) + this._trigger('changeDate'); + } + if (!this.dates.length && oldDates.length) + this._trigger('clearDate'); + + this.fill(); + this.element.change(); + return this; + }, + + fillDow: function(){ + var dowCnt = this.o.weekStart, + html = ''; + if (this.o.calendarWeeks){ + this.picker.find('.datepicker-days .datepicker-switch') + .attr('colspan', function(i, val){ + return parseInt(val) + 1; + }); + html += ' '; + } + while (dowCnt < this.o.weekStart + 7){ + html += ''+dates[this.o.language].daysMin[(dowCnt++)%7]+''; + } + html += ''; + this.picker.find('.datepicker-days thead').append(html); + }, + + fillMonths: function(){ + var html = '', + i = 0; + while (i < 12){ + html += ''+dates[this.o.language].monthsShort[i++]+''; + } + this.picker.find('.datepicker-months td').html(html); + }, + + setRange: function(range){ + if (!range || !range.length) + delete this.range; + else + this.range = $.map(range, function(d){ + return d.valueOf(); + }); + this.fill(); + }, + + getClassNames: function(date){ + var cls = [], + year = this.viewDate.getUTCFullYear(), + month = this.viewDate.getUTCMonth(), + today = new Date(); + if (date.getUTCFullYear() < year || (date.getUTCFullYear() === year && date.getUTCMonth() < month)){ + cls.push('old'); + } + else if (date.getUTCFullYear() > year || (date.getUTCFullYear() === year && date.getUTCMonth() > month)){ + cls.push('new'); + } + if (this.focusDate && date.valueOf() === this.focusDate.valueOf()) + cls.push('focused'); + // Compare internal UTC date with local today, not UTC today + if (this.o.todayHighlight && + date.getUTCFullYear() === today.getFullYear() && + date.getUTCMonth() === today.getMonth() && + date.getUTCDate() === today.getDate()){ + cls.push('today'); + } + if (this.dates.contains(date) !== -1) + cls.push('active'); + if (!this.dateWithinRange(date) || this.dateIsDisabled(date)){ + cls.push('disabled'); + } + if ($.inArray(date.getUTCDay(), this.o.daysOfWeekHighlighted) !== -1){ + cls.push('highlighted'); + } + + if (this.range){ + if (date > this.range[0] && date < this.range[this.range.length-1]){ + cls.push('range'); + } + if ($.inArray(date.valueOf(), this.range) !== -1){ + cls.push('selected'); + } + if (date.valueOf() === this.range[0]){ + cls.push('range-start'); + } + if (date.valueOf() === this.range[this.range.length-1]){ + cls.push('range-end'); + } + } + return cls; + }, + + fill: function(){ + var d = new Date(this.viewDate), + year = d.getUTCFullYear(), + month = d.getUTCMonth(), + startYear = this.o.startDate !== -Infinity ? 
this.o.startDate.getUTCFullYear() : -Infinity, + startMonth = this.o.startDate !== -Infinity ? this.o.startDate.getUTCMonth() : -Infinity, + endYear = this.o.endDate !== Infinity ? this.o.endDate.getUTCFullYear() : Infinity, + endMonth = this.o.endDate !== Infinity ? this.o.endDate.getUTCMonth() : Infinity, + todaytxt = dates[this.o.language].today || dates['en'].today || '', + cleartxt = dates[this.o.language].clear || dates['en'].clear || '', + titleFormat = dates[this.o.language].titleFormat || dates['en'].titleFormat, + tooltip; + if (isNaN(year) || isNaN(month)) + return; + this.picker.find('.datepicker-days thead .datepicker-switch') + .text(DPGlobal.formatDate(new UTCDate(year, month), titleFormat, this.o.language)); + this.picker.find('tfoot .today') + .text(todaytxt) + .toggle(this.o.todayBtn !== false); + this.picker.find('tfoot .clear') + .text(cleartxt) + .toggle(this.o.clearBtn !== false); + this.picker.find('thead .datepicker-title') + .text(this.o.title) + .toggle(this.o.title !== ''); + this.updateNavArrows(); + this.fillMonths(); + var prevMonth = UTCDate(year, month-1, 28), + day = DPGlobal.getDaysInMonth(prevMonth.getUTCFullYear(), prevMonth.getUTCMonth()); + prevMonth.setUTCDate(day); + prevMonth.setUTCDate(day - (prevMonth.getUTCDay() - this.o.weekStart + 7)%7); + var nextMonth = new Date(prevMonth); + if (prevMonth.getUTCFullYear() < 100){ + nextMonth.setUTCFullYear(prevMonth.getUTCFullYear()); + } + nextMonth.setUTCDate(nextMonth.getUTCDate() + 42); + nextMonth = nextMonth.valueOf(); + var html = []; + var clsName; + while (prevMonth.valueOf() < nextMonth){ + if (prevMonth.getUTCDay() === this.o.weekStart){ + html.push('<tr>'); + if (this.o.calendarWeeks){ + // ISO 8601: First week contains the first Thursday. + // ISO also states week starts on Monday, but we can be more abstract here. + var + // Start of current week: based on weekstart/current date + ws = new Date(+prevMonth + (this.o.weekStart - prevMonth.getUTCDay() - 7) % 7 * 864e5), + // Thursday of this week + th = new Date(Number(ws) + (7 + 4 - ws.getUTCDay()) % 7 * 864e5), + // First Thursday of year, year from Thursday + yth = new Date(Number(yth = UTCDate(th.getUTCFullYear(), 0, 1)) + (7 + 4 - yth.getUTCDay())%7*864e5), + // Calendar week: ms between Thursdays, div ms per day, div 7 days + calWeek = (th - yth) / 864e5 / 7 + 1; + html.push('<td class="cw">'+ calWeek +'</td>'); + + } + } + clsName = this.getClassNames(prevMonth); + clsName.push('day'); + + if (this.o.beforeShowDay !== $.noop){ + var before = this.o.beforeShowDay(this._utc_to_local(prevMonth)); + if (before === undefined) + before = {}; + else if (typeof(before) === 'boolean') + before = {enabled: before}; + else if (typeof(before) === 'string') + before = {classes: before}; + if (before.enabled === false) + clsName.push('disabled'); + if (before.classes) + clsName = clsName.concat(before.classes.split(/\s+/)); + if (before.tooltip) + tooltip = before.tooltip; + } + + clsName = $.unique(clsName); + html.push('<td class="'+clsName.join(' ')+'"' + (tooltip ? ' title="'+tooltip+'"' : '') + '>'+prevMonth.getUTCDate() + '</td>'); + tooltip = null; + if (prevMonth.getUTCDay() === this.o.weekEnd){ + html.push('</tr>'); + } + prevMonth.setUTCDate(prevMonth.getUTCDate()+1); + } + this.picker.find('.datepicker-days tbody').empty().append(html.join('')); + + var monthsTitle = dates[this.o.language].monthsTitle || dates['en'].monthsTitle || 'Months'; + var months = this.picker.find('.datepicker-months') + .find('.datepicker-switch') + .text(this.o.maxViewMode < 2 ? 
monthsTitle : year) + .end() + .find('span').removeClass('active'); + + $.each(this.dates, function(i, d){ + if (d.getUTCFullYear() === year) + months.eq(d.getUTCMonth()).addClass('active'); + }); + + if (year < startYear || year > endYear){ + months.addClass('disabled'); + } + if (year === startYear){ + months.slice(0, startMonth).addClass('disabled'); + } + if (year === endYear){ + months.slice(endMonth+1).addClass('disabled'); + } + + if (this.o.beforeShowMonth !== $.noop){ + var that = this; + $.each(months, function(i, month){ + if (!$(month).hasClass('disabled')) { + var moDate = new Date(year, i, 1); + var before = that.o.beforeShowMonth(moDate); + if (before === false) + $(month).addClass('disabled'); + } + }); + } + + html = ''; + year = parseInt(year/10, 10) * 10; + var yearCont = this.picker.find('.datepicker-years') + .find('.datepicker-switch') + .text(year + '-' + (year + 9)) + .end() + .find('td'); + year -= 1; + var years = $.map(this.dates, function(d){ + return d.getUTCFullYear(); + }), + classes; + for (var i = -1; i < 11; i++){ + classes = ['year']; + tooltip = null; + + if (i === -1) + classes.push('old'); + else if (i === 10) + classes.push('new'); + if ($.inArray(year, years) !== -1) + classes.push('active'); + if (year < startYear || year > endYear) + classes.push('disabled'); + + if (this.o.beforeShowYear !== $.noop) { + var yrBefore = this.o.beforeShowYear(new Date(year, 0, 1)); + if (yrBefore === undefined) + yrBefore = {}; + else if (typeof(yrBefore) === 'boolean') + yrBefore = {enabled: yrBefore}; + else if (typeof(yrBefore) === 'string') + yrBefore = {classes: yrBefore}; + if (yrBefore.enabled === false) + classes.push('disabled'); + if (yrBefore.classes) + classes = classes.concat(yrBefore.classes.split(/\s+/)); + if (yrBefore.tooltip) + tooltip = yrBefore.tooltip; + } + + html += '<span class="' + classes.join(' ') + '"' + (tooltip ? ' title="'+tooltip+'"' : '') + '>' + year + '</span>'; + year += 1; + } + yearCont.html(html); + }, + + updateNavArrows: function(){ + if (!this._allow_update) + return; + + var d = new Date(this.viewDate), + year = d.getUTCFullYear(), + month = d.getUTCMonth(); + switch (this.viewMode){ + case 0: + if (this.o.startDate !== -Infinity && year <= this.o.startDate.getUTCFullYear() && month <= this.o.startDate.getUTCMonth()){ + this.picker.find('.prev').css({visibility: 'hidden'}); + } + else { + this.picker.find('.prev').css({visibility: 'visible'}); + } + if (this.o.endDate !== Infinity && year >= this.o.endDate.getUTCFullYear() && month >= this.o.endDate.getUTCMonth()){ + this.picker.find('.next').css({visibility: 'hidden'}); + } + else { + this.picker.find('.next').css({visibility: 'visible'}); + } + break; + case 1: + case 2: + if (this.o.startDate !== -Infinity && year <= this.o.startDate.getUTCFullYear() || this.o.maxViewMode < 2){ + this.picker.find('.prev').css({visibility: 'hidden'}); + } + else { + this.picker.find('.prev').css({visibility: 'visible'}); + } + if (this.o.endDate !== Infinity && year >= this.o.endDate.getUTCFullYear() || this.o.maxViewMode < 2){ + this.picker.find('.next').css({visibility: 'hidden'}); + } + else { + this.picker.find('.next').css({visibility: 'visible'}); + } + break; + } + }, + + click: function(e){ + e.preventDefault(); + e.stopPropagation(); + var target = $(e.target).closest('span, td, th'), + year, month, day; + if (target.length === 1){ + switch (target[0].nodeName.toLowerCase()){ + case 'th': + switch (target[0].className){ + case 'datepicker-switch': + this.showMode(1); + break; + case 'prev': + case 'next': + var dir = DPGlobal.modes[this.viewMode].navStep * 
(target[0].className === 'prev' ? -1 : 1); + switch (this.viewMode){ + case 0: + this.viewDate = this.moveMonth(this.viewDate, dir); + this._trigger('changeMonth', this.viewDate); + break; + case 1: + case 2: + this.viewDate = this.moveYear(this.viewDate, dir); + if (this.viewMode === 1) + this._trigger('changeYear', this.viewDate); + break; + } + this.fill(); + break; + case 'today': + this.showMode(-2); + var which = this.o.todayBtn === 'linked' ? null : 'view'; + this._setDate(UTCToday(), which); + break; + case 'clear': + this.clearDates(); + break; + } + break; + case 'span': + if (!target.hasClass('disabled')){ + this.viewDate.setUTCDate(1); + if (target.hasClass('month')){ + day = 1; + month = target.parent().find('span').index(target); + year = this.viewDate.getUTCFullYear(); + this.viewDate.setUTCMonth(month); + this._trigger('changeMonth', this.viewDate); + if (this.o.minViewMode === 1){ + this._setDate(UTCDate(year, month, day)); + this.showMode(); + } else { + this.showMode(-1); + } + } + else { + day = 1; + month = 0; + year = parseInt(target.text(), 10)||0; + this.viewDate.setUTCFullYear(year); + this._trigger('changeYear', this.viewDate); + if (this.o.minViewMode === 2){ + this._setDate(UTCDate(year, month, day)); + } + this.showMode(-1); + } + this.fill(); + } + break; + case 'td': + if (target.hasClass('day') && !target.hasClass('disabled')){ + day = parseInt(target.text(), 10)||1; + year = this.viewDate.getUTCFullYear(); + month = this.viewDate.getUTCMonth(); + if (target.hasClass('old')){ + if (month === 0){ + month = 11; + year -= 1; + } + else { + month -= 1; + } + } + else if (target.hasClass('new')){ + if (month === 11){ + month = 0; + year += 1; + } + else { + month += 1; + } + } + this._setDate(UTCDate(year, month, day)); + } + break; + } + } + if (this.picker.is(':visible') && this._focused_from){ + $(this._focused_from).focus(); + } + delete this._focused_from; + }, + + _toggle_multidate: function(date){ + var ix = this.dates.contains(date); + if (!date){ + this.dates.clear(); + } + + if (ix !== -1){ + if (this.o.multidate === true || this.o.multidate > 1 || this.o.toggleActive){ + this.dates.remove(ix); + } + } else if (this.o.multidate === false) { + this.dates.clear(); + this.dates.push(date); + } + else { + this.dates.push(date); + } + + if (typeof this.o.multidate === 'number') + while (this.dates.length > this.o.multidate) + this.dates.remove(0); + }, + + _setDate: function(date, which){ + if (!which || which === 'date') + this._toggle_multidate(date && new Date(date)); + if (!which || which === 'view') + this.viewDate = date && new Date(date); + + this.fill(); + this.setValue(); + if (!which || which !== 'view') { + this._trigger('changeDate'); + } + var element; + if (this.isInput){ + element = this.element; + } + else if (this.component){ + element = this.element.find('input'); + } + if (element){ + element.change(); + } + if (this.o.autoclose && (!which || which === 'date')){ + this.hide(); + } + }, + + moveDay: function(date, dir){ + var newDate = new Date(date); + newDate.setUTCDate(date.getUTCDate() + dir); + + return newDate; + }, + + moveWeek: function(date, dir){ + return this.moveDay(date, dir * 7); + }, + + moveMonth: function(date, dir){ + if (!isValidDate(date)) + return this.o.defaultViewDate; + if (!dir) + return date; + var new_date = new Date(date.valueOf()), + day = new_date.getUTCDate(), + month = new_date.getUTCMonth(), + mag = Math.abs(dir), + new_month, test; + dir = dir > 0 ? 
1 : -1; + if (mag === 1){ + test = dir === -1 + // If going back one month, make sure month is not current month + // (eg, Mar 31 -> Feb 31 == Feb 28, not Mar 02) + ? function(){ + return new_date.getUTCMonth() === month; + } + // If going forward one month, make sure month is as expected + // (eg, Jan 31 -> Feb 31 == Feb 28, not Mar 02) + : function(){ + return new_date.getUTCMonth() !== new_month; + }; + new_month = month + dir; + new_date.setUTCMonth(new_month); + // Dec -> Jan (12) or Jan -> Dec (-1) -- limit expected date to 0-11 + if (new_month < 0 || new_month > 11) + new_month = (new_month + 12) % 12; + } + else { + // For magnitudes >1, move one month at a time... + for (var i=0; i < mag; i++) + // ...which might decrease the day (eg, Jan 31 to Feb 28, etc)... + new_date = this.moveMonth(new_date, dir); + // ...then reset the day, keeping it in the new month + new_month = new_date.getUTCMonth(); + new_date.setUTCDate(day); + test = function(){ + return new_month !== new_date.getUTCMonth(); + }; + } + // Common date-resetting loop -- if date is beyond end of month, make it + // end of month + while (test()){ + new_date.setUTCDate(--day); + new_date.setUTCMonth(new_month); + } + return new_date; + }, + + moveYear: function(date, dir){ + return this.moveMonth(date, dir*12); + }, + + moveAvailableDate: function(date, dir, fn){ + do { + date = this[fn](date, dir); + + if (!this.dateWithinRange(date)) + return false; + + fn = 'moveDay'; + } + while (this.dateIsDisabled(date)); + + return date; + }, + + weekOfDateIsDisabled: function(date){ + return $.inArray(date.getUTCDay(), this.o.daysOfWeekDisabled) !== -1; + }, + + dateIsDisabled: function(date){ + return ( + this.weekOfDateIsDisabled(date) || + $.grep(this.o.datesDisabled, function(d){ + return isUTCEquals(date, d); + }).length > 0 + ); + }, + + dateWithinRange: function(date){ + return date >= this.o.startDate && date <= this.o.endDate; + }, + + keydown: function(e){ + if (!this.picker.is(':visible')){ + if (e.keyCode === 40 || e.keyCode === 27) { // allow down to re-show picker + this.show(); + e.stopPropagation(); + } + return; + } + var dateChanged = false, + dir, newViewDate, + focusDate = this.focusDate || this.viewDate; + switch (e.keyCode){ + case 27: // escape + if (this.focusDate){ + this.focusDate = null; + this.viewDate = this.dates.get(-1) || this.viewDate; + this.fill(); + } + else + this.hide(); + e.preventDefault(); + e.stopPropagation(); + break; + case 37: // left + case 38: // up + case 39: // right + case 40: // down + if (!this.o.keyboardNavigation || this.o.daysOfWeekDisabled.length === 7) + break; + dir = e.keyCode === 37 || e.keyCode === 38 ? 
-1 : 1; + if (e.ctrlKey){ + newViewDate = this.moveAvailableDate(focusDate, dir, 'moveYear'); + + if (newViewDate) + this._trigger('changeYear', this.viewDate); + } + else if (e.shiftKey){ + newViewDate = this.moveAvailableDate(focusDate, dir, 'moveMonth'); + + if (newViewDate) + this._trigger('changeMonth', this.viewDate); + } + else if (e.keyCode === 37 || e.keyCode === 39){ + newViewDate = this.moveAvailableDate(focusDate, dir, 'moveDay'); + } + else if (!this.weekOfDateIsDisabled(focusDate)){ + newViewDate = this.moveAvailableDate(focusDate, dir, 'moveWeek'); + } + if (newViewDate){ + this.focusDate = this.viewDate = newViewDate; + this.setValue(); + this.fill(); + e.preventDefault(); + } + break; + case 13: // enter + if (!this.o.forceParse) + break; + focusDate = this.focusDate || this.dates.get(-1) || this.viewDate; + if (this.o.keyboardNavigation) { + this._toggle_multidate(focusDate); + dateChanged = true; + } + this.focusDate = null; + this.viewDate = this.dates.get(-1) || this.viewDate; + this.setValue(); + this.fill(); + if (this.picker.is(':visible')){ + e.preventDefault(); + e.stopPropagation(); + if (this.o.autoclose) + this.hide(); + } + break; + case 9: // tab + this.focusDate = null; + this.viewDate = this.dates.get(-1) || this.viewDate; + this.fill(); + this.hide(); + break; + } + if (dateChanged){ + if (this.dates.length) + this._trigger('changeDate'); + else + this._trigger('clearDate'); + var element; + if (this.isInput){ + element = this.element; + } + else if (this.component){ + element = this.element.find('input'); + } + if (element){ + element.change(); + } + } + }, + + showMode: function(dir){ + if (dir){ + this.viewMode = Math.max(this.o.minViewMode, Math.min(this.o.maxViewMode, this.viewMode + dir)); + } + this.picker + .children('div') + .hide() + .filter('.datepicker-' + DPGlobal.modes[this.viewMode].clsName) + .show(); + this.updateNavArrows(); + } + }; + + var DateRangePicker = function(element, options){ + $(element).data('datepicker', this); + this.element = $(element); + this.inputs = $.map(options.inputs, function(i){ + return i.jquery ? i[0] : i; + }); + delete options.inputs; + + datepickerPlugin.call($(this.inputs), options) + .on('changeDate', $.proxy(this.dateUpdated, this)); + + this.pickers = $.map(this.inputs, function(i){ + return $(i).data('datepicker'); + }); + this.updateDates(); + }; + DateRangePicker.prototype = { + updateDates: function(){ + this.dates = $.map(this.pickers, function(i){ + return i.getUTCDate(); + }); + this.updateRanges(); + }, + updateRanges: function(){ + var range = $.map(this.dates, function(d){ + return d.valueOf(); + }); + $.each(this.pickers, function(i, p){ + p.setRange(range); + }); + }, + dateUpdated: function(e){ + // `this.updating` is a workaround for preventing infinite recursion + // between `changeDate` triggering and `setUTCDate` calling. Until + // there is a better mechanism. 
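+ // (Illustrative note, not part of the upstream source: with two linked + // inputs, picker A's setUTCDate fires 'changeDate', which re-enters + // dateUpdated and calls setUTCDate on picker B; without the guard below, + // B's 'changeDate' would loop straight back into A indefinitely.)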
+ if (this.updating) + return; + this.updating = true; + + var dp = $(e.target).data('datepicker'); + + if (typeof(dp) === "undefined") { + // clear the guard before bailing out, or later updates would be ignored + delete this.updating; + return; + } + + var new_date = dp.getUTCDate(), + i = $.inArray(e.target, this.inputs), + j = i - 1, + k = i + 1, + l = this.inputs.length; + if (i === -1){ + delete this.updating; + return; + } + + $.each(this.pickers, function(i, p){ + if (!p.getUTCDate()) + p.setUTCDate(new_date); + }); + + if (new_date < this.dates[j]){ + // Date being moved earlier/left + while (j >= 0 && new_date < this.dates[j]){ + this.pickers[j--].setUTCDate(new_date); + } + } + else if (new_date > this.dates[k]){ + // Date being moved later/right + while (k < l && new_date > this.dates[k]){ + this.pickers[k++].setUTCDate(new_date); + } + } + this.updateDates(); + + delete this.updating; + }, + remove: function(){ + $.map(this.pickers, function(p){ p.remove(); }); + delete this.element.data().datepicker; + } + }; + + function opts_from_el(el, prefix){ + // Derive options from element data-attrs + var data = $(el).data(), + out = {}, inkey, + replace = new RegExp('^' + prefix.toLowerCase() + '([A-Z])'); + prefix = new RegExp('^' + prefix.toLowerCase()); + function re_lower(_,a){ + return a.toLowerCase(); + } + for (var key in data) + if (prefix.test(key)){ + inkey = key.replace(replace, re_lower); + out[inkey] = data[key]; + } + return out; + } + + function opts_from_locale(lang){ + // Derive options from locale plugins + var out = {}; + // Check if "de-DE" style date is available; if not, the language should + // fall back to the 2-letter code, e.g. "de" + if (!dates[lang]){ + lang = lang.split('-')[0]; + if (!dates[lang]) + return; + } + var d = dates[lang]; + $.each(locale_opts, function(i,k){ + if (k in d) + out[k] = d[k]; + }); + return out; + } + + var old = $.fn.datepicker; + var datepickerPlugin = function(option){ + var args = Array.apply(null, arguments); + args.shift(); + var internal_return; + this.each(function(){ + var $this = $(this), + data = $this.data('datepicker'), + options = typeof option === 'object' && option; + if (!data){ + var elopts = opts_from_el(this, 'date'), + // Preliminary options + xopts = $.extend({}, defaults, elopts, options), + locopts = opts_from_locale(xopts.language), + // Options priority: js args, data-attrs, locales, defaults + opts = $.extend({}, defaults, locopts, elopts, options); + if ($this.hasClass('input-daterange') || opts.inputs){ + $.extend(opts, { + inputs: opts.inputs || $this.find('input').toArray() + }); + data = new DateRangePicker(this, opts); + } + else { + data = new Datepicker(this, opts); + } + $this.data('datepicker', data); + } + if (typeof option === 'string' && typeof data[option] === 'function'){ + internal_return = data[option].apply(data, args); + } + }); + + if ( + internal_return === undefined || + internal_return instanceof Datepicker || + internal_return instanceof DateRangePicker + ) + return this; + + if (this.length > 1) + throw new Error('Using the ' + option + ' function is only allowed on a collection of a single element'); + else + return internal_return; + }; + $.fn.datepicker = datepickerPlugin; + + var defaults = $.fn.datepicker.defaults = { + autoclose: false, + beforeShowDay: $.noop, + beforeShowMonth: $.noop, + beforeShowYear: $.noop, + calendarWeeks: false, + clearBtn: false, + toggleActive: false, + daysOfWeekDisabled: [], + daysOfWeekHighlighted: [], + datesDisabled: [], + endDate: Infinity, + forceParse: true, + format: 'mm/dd/yyyy', + keyboardNavigation: true, + language: 'en', + minViewMode: 0, + maxViewMode: 2, + multidate: 
false, + multidateSeparator: ',', + orientation: "auto", + rtl: false, + startDate: -Infinity, + startView: 0, + todayBtn: false, + todayHighlight: false, + weekStart: 0, + disableTouchKeyboard: false, + enableOnReadonly: true, + showOnFocus: true, + zIndexOffset: 10, + container: 'body', + immediateUpdates: false, + title: '' + }; + var locale_opts = $.fn.datepicker.locale_opts = [ + 'format', + 'rtl', + 'weekStart' + ]; + $.fn.datepicker.Constructor = Datepicker; + var dates = $.fn.datepicker.dates = { + en: { + days: ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"], + daysShort: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"], + daysMin: ["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"], + months: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"], + monthsShort: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"], + today: "Today", + clear: "Clear", + titleFormat: "MM yyyy" + } + }; + + var DPGlobal = { + modes: [ + { + clsName: 'days', + navFnc: 'Month', + navStep: 1 + }, + { + clsName: 'months', + navFnc: 'FullYear', + navStep: 1 + }, + { + clsName: 'years', + navFnc: 'FullYear', + navStep: 10 + }], + isLeapYear: function(year){ + return (((year % 4 === 0) && (year % 100 !== 0)) || (year % 400 === 0)); + }, + getDaysInMonth: function(year, month){ + return [31, (DPGlobal.isLeapYear(year) ? 29 : 28), 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month]; + }, + validParts: /dd?|DD?|mm?|MM?|yy(?:yy)?/g, + nonpunctuation: /[^ -\/:-@\[\u3400-\u9fff-`{-~\t\n\r]+/g, + parseFormat: function(format){ + if (typeof format.toValue === 'function' && typeof format.toDisplay === 'function') + return format; + // IE treats \0 as a string end in inputs (truncating the value), + // so it's a bad format delimiter, anyway + var separators = format.replace(this.validParts, '\0').split('\0'), + parts = format.match(this.validParts); + if (!separators || !separators.length || !parts || parts.length === 0){ + throw new Error("Invalid date format."); + } + return {separators: separators, parts: parts}; + }, + parseDate: function(date, format, language){ + if (!date) + return undefined; + if (date instanceof Date) + return date; + if (typeof format === 'string') + format = DPGlobal.parseFormat(format); + if (format.toValue) + return format.toValue(date, format, language); + var part_re = /([\-+]\d+)([dmwy])/, + parts = date.match(/([\-+]\d+)([dmwy])/g), + fn_map = { + d: 'moveDay', + m: 'moveMonth', + w: 'moveWeek', + y: 'moveYear' + }, + part, dir, i, fn; + if (/^[\-+]\d+[dmwy]([\s,]+[\-+]\d+[dmwy])*$/.test(date)){ + date = new Date(); + for (i=0; i < parts.length; i++){ + part = part_re.exec(parts[i]); + dir = parseInt(part[1]); + fn = fn_map[part[2]]; + date = Datepicker.prototype[fn](date, dir); + } + return UTCDate(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate()); + } + parts = date && date.match(this.nonpunctuation) || []; + date = new Date(); + var parsed = {}, + setters_order = ['yyyy', 'yy', 'M', 'MM', 'm', 'mm', 'd', 'dd'], + setters_map = { + yyyy: function(d,v){ + return d.setUTCFullYear(v); + }, + yy: function(d,v){ + return d.setUTCFullYear(2000+v); + }, + m: function(d,v){ + if (isNaN(d)) + return d; + v -= 1; + while (v < 0) v += 12; + v %= 12; + d.setUTCMonth(v); + while (d.getUTCMonth() !== v) + d.setUTCDate(d.getUTCDate()-1); + return d; + }, + d: function(d,v){ + return d.setUTCDate(v); + } + }, + val, filtered; + setters_map['M'] = setters_map['MM'] = 
setters_map['mm'] = setters_map['m']; + setters_map['dd'] = setters_map['d']; + date = UTCToday(); + var fparts = format.parts.slice(); + // Remove noop parts + if (parts.length !== fparts.length){ + fparts = $(fparts).filter(function(i,p){ + return $.inArray(p, setters_order) !== -1; + }).toArray(); + } + // Process remainder + function match_part(){ + var m = this.slice(0, parts[i].length), + p = parts[i].slice(0, m.length); + return m.toLowerCase() === p.toLowerCase(); + } + if (parts.length === fparts.length){ + var cnt; + for (i=0, cnt = fparts.length; i < cnt; i++){ + val = parseInt(parts[i], 10); + part = fparts[i]; + if (isNaN(val)){ + switch (part){ + case 'MM': + filtered = $(dates[language].months).filter(match_part); + val = $.inArray(filtered[0], dates[language].months) + 1; + break; + case 'M': + filtered = $(dates[language].monthsShort).filter(match_part); + val = $.inArray(filtered[0], dates[language].monthsShort) + 1; + break; + } + } + parsed[part] = val; + } + var _date, s; + for (i=0; i < setters_order.length; i++){ + s = setters_order[i]; + if (s in parsed && !isNaN(parsed[s])){ + _date = new Date(date); + setters_map[s](_date, parsed[s]); + if (!isNaN(_date)) + date = _date; + } + } + } + return date; + }, + formatDate: function(date, format, language){ + if (!date) + return ''; + if (typeof format === 'string') + format = DPGlobal.parseFormat(format); + if (format.toDisplay) + return format.toDisplay(date, format, language); + var val = { + d: date.getUTCDate(), + D: dates[language].daysShort[date.getUTCDay()], + DD: dates[language].days[date.getUTCDay()], + m: date.getUTCMonth() + 1, + M: dates[language].monthsShort[date.getUTCMonth()], + MM: dates[language].months[date.getUTCMonth()], + yy: date.getUTCFullYear().toString().substring(2), + yyyy: date.getUTCFullYear() + }; + val.dd = (val.d < 10 ? '0' : '') + val.d; + val.mm = (val.m < 10 ? '0' : '') + val.m; + date = []; + var seps = $.extend([], format.separators); + for (var i=0, cnt = format.parts.length; i <= cnt; i++){ + if (seps.length) + date.push(seps.shift()); + date.push(val[format.parts[i]]); + } + return date.join(''); + }, + headTemplate: '<thead>'+ + '<tr>'+ + '<th colspan="7" class="datepicker-title"></th>'+ + '</tr>'+ + '<tr>'+ + '<th class="prev">&#171;</th>'+ + '<th colspan="5" class="datepicker-switch"></th>'+ + '<th class="next">&#187;</th>'+ + '</tr>'+ + '</thead>', + contTemplate: '<tbody><tr><td colspan="7"></td></tr></tbody>', + footTemplate: '<tfoot>'+ + '<tr>'+ + '<th colspan="7" class="today"></th>'+ + '</tr>'+ + '<tr>'+ + '<th colspan="7" class="clear"></th>'+ + '</tr>'+ + '</tfoot>' + }; + DPGlobal.template = '<div class="datepicker">'+
+ '<div class="datepicker-days">'+ + '<table class=" table-condensed">'+ + DPGlobal.headTemplate+ + '<tbody></tbody>'+ + DPGlobal.footTemplate+ + '</table>'+ + '</div>'+ + '<div class="datepicker-months">'+ + '<table class="table-condensed">'+ + DPGlobal.headTemplate+ + DPGlobal.contTemplate+ + DPGlobal.footTemplate+ + '</table>'+ + '</div>'+ + '<div class="datepicker-years">'+ + '<table class="table-condensed">'+ + DPGlobal.headTemplate+ + DPGlobal.contTemplate+ + DPGlobal.footTemplate+ + '</table>'+ + '</div>'+ + '</div>
'; + + $.fn.datepicker.DPGlobal = DPGlobal; + + + /* DATEPICKER NO CONFLICT + * =================== */ + + $.fn.datepicker.noConflict = function(){ + $.fn.datepicker = old; + return this; + }; + + /* DATEPICKER VERSION + * =================== */ + $.fn.datepicker.version = '1.5.1'; + + /* DATEPICKER DATA-API + * ================== */ + + $(document).on( + 'focus.datepicker.data-api click.datepicker.data-api', + '[data-provide="datepicker"]', + function(e){ + var $this = $(this); + if ($this.data('datepicker')) + return; + e.preventDefault(); + // component click requires us to explicitly show it + datepickerPlugin.call($this, 'show'); + } + ); + $(function(){ + datepickerPlugin.call($('[data-provide="datepicker-inline"]')); + }); + +})); diff --git a/public/css/bootstrap-theme.css b/public/assets/bootstrap/css/bootstrap-theme.css similarity index 100% rename from public/css/bootstrap-theme.css rename to public/assets/bootstrap/css/bootstrap-theme.css diff --git a/public/css/bootstrap-theme.css.map b/public/assets/bootstrap/css/bootstrap-theme.css.map similarity index 100% rename from public/css/bootstrap-theme.css.map rename to public/assets/bootstrap/css/bootstrap-theme.css.map diff --git a/public/css/bootstrap-theme.min.css b/public/assets/bootstrap/css/bootstrap-theme.min.css similarity index 100% rename from public/css/bootstrap-theme.min.css rename to public/assets/bootstrap/css/bootstrap-theme.min.css diff --git a/public/css/bootstrap.css b/public/assets/bootstrap/css/bootstrap.css similarity index 100% rename from public/css/bootstrap.css rename to public/assets/bootstrap/css/bootstrap.css diff --git a/public/css/bootstrap.css.map b/public/assets/bootstrap/css/bootstrap.css.map similarity index 100% rename from public/css/bootstrap.css.map rename to public/assets/bootstrap/css/bootstrap.css.map diff --git a/public/css/bootstrap.min.css b/public/assets/bootstrap/css/bootstrap.min.css similarity index 100% rename from public/css/bootstrap.min.css rename to public/assets/bootstrap/css/bootstrap.min.css diff --git a/public/fonts/glyphicons-halflings-regular.eot b/public/assets/bootstrap/fonts/glyphicons-halflings-regular.eot similarity index 100% rename from public/fonts/glyphicons-halflings-regular.eot rename to public/assets/bootstrap/fonts/glyphicons-halflings-regular.eot diff --git a/public/fonts/glyphicons-halflings-regular.svg b/public/assets/bootstrap/fonts/glyphicons-halflings-regular.svg similarity index 100% rename from public/fonts/glyphicons-halflings-regular.svg rename to public/assets/bootstrap/fonts/glyphicons-halflings-regular.svg diff --git a/public/fonts/glyphicons-halflings-regular.ttf b/public/assets/bootstrap/fonts/glyphicons-halflings-regular.ttf similarity index 100% rename from public/fonts/glyphicons-halflings-regular.ttf rename to public/assets/bootstrap/fonts/glyphicons-halflings-regular.ttf diff --git a/public/fonts/glyphicons-halflings-regular.woff b/public/assets/bootstrap/fonts/glyphicons-halflings-regular.woff similarity index 100% rename from public/fonts/glyphicons-halflings-regular.woff rename to public/assets/bootstrap/fonts/glyphicons-halflings-regular.woff diff --git a/public/js/bootstrap.js b/public/assets/bootstrap/js/bootstrap.js similarity index 100% rename from public/js/bootstrap.js rename to public/assets/bootstrap/js/bootstrap.js diff --git a/public/js/bootstrap.min.js b/public/assets/bootstrap/js/bootstrap.min.js similarity index 100% rename from public/js/bootstrap.min.js rename to public/assets/bootstrap/js/bootstrap.min.js diff --git 
a/public/assets/d3/d3.js b/public/assets/d3/d3.js new file mode 100644 index 000000000..e4d866432 --- /dev/null +++ b/public/assets/d3/d3.js @@ -0,0 +1,9503 @@ +!function() { + var d3 = { + version: "3.5.6" + }; + var d3_arraySlice = [].slice, d3_array = function(list) { + return d3_arraySlice.call(list); + }; + var d3_document = this.document; + function d3_documentElement(node) { + return node && (node.ownerDocument || node.document || node).documentElement; + } + function d3_window(node) { + return node && (node.ownerDocument && node.ownerDocument.defaultView || node.document && node || node.defaultView); + } + if (d3_document) { + try { + d3_array(d3_document.documentElement.childNodes)[0].nodeType; + } catch (e) { + d3_array = function(list) { + var i = list.length, array = new Array(i); + while (i--) array[i] = list[i]; + return array; + }; + } + } + if (!Date.now) Date.now = function() { + return +new Date(); + }; + if (d3_document) { + try { + d3_document.createElement("DIV").style.setProperty("opacity", 0, ""); + } catch (error) { + var d3_element_prototype = this.Element.prototype, d3_element_setAttribute = d3_element_prototype.setAttribute, d3_element_setAttributeNS = d3_element_prototype.setAttributeNS, d3_style_prototype = this.CSSStyleDeclaration.prototype, d3_style_setProperty = d3_style_prototype.setProperty; + d3_element_prototype.setAttribute = function(name, value) { + d3_element_setAttribute.call(this, name, value + ""); + }; + d3_element_prototype.setAttributeNS = function(space, local, value) { + d3_element_setAttributeNS.call(this, space, local, value + ""); + }; + d3_style_prototype.setProperty = function(name, value, priority) { + d3_style_setProperty.call(this, name, value + "", priority); + }; + } + } + d3.ascending = d3_ascending; + function d3_ascending(a, b) { + return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN; + } + d3.descending = function(a, b) { + return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN; + }; + d3.min = function(array, f) { + var i = -1, n = array.length, a, b; + if (arguments.length === 1) { + while (++i < n) if ((b = array[i]) != null && b >= b) { + a = b; + break; + } + while (++i < n) if ((b = array[i]) != null && a > b) a = b; + } else { + while (++i < n) if ((b = f.call(array, array[i], i)) != null && b >= b) { + a = b; + break; + } + while (++i < n) if ((b = f.call(array, array[i], i)) != null && a > b) a = b; + } + return a; + }; + d3.max = function(array, f) { + var i = -1, n = array.length, a, b; + if (arguments.length === 1) { + while (++i < n) if ((b = array[i]) != null && b >= b) { + a = b; + break; + } + while (++i < n) if ((b = array[i]) != null && b > a) a = b; + } else { + while (++i < n) if ((b = f.call(array, array[i], i)) != null && b >= b) { + a = b; + break; + } + while (++i < n) if ((b = f.call(array, array[i], i)) != null && b > a) a = b; + } + return a; + }; + d3.extent = function(array, f) { + var i = -1, n = array.length, a, b, c; + if (arguments.length === 1) { + while (++i < n) if ((b = array[i]) != null && b >= b) { + a = c = b; + break; + } + while (++i < n) if ((b = array[i]) != null) { + if (a > b) a = b; + if (c < b) c = b; + } + } else { + while (++i < n) if ((b = f.call(array, array[i], i)) != null && b >= b) { + a = c = b; + break; + } + while (++i < n) if ((b = f.call(array, array[i], i)) != null) { + if (a > b) a = b; + if (c < b) c = b; + } + } + return [ a, c ]; + }; + function d3_number(x) { + return x === null ? 
NaN : +x; + } + function d3_numeric(x) { + return !isNaN(x); + } + d3.sum = function(array, f) { + var s = 0, n = array.length, a, i = -1; + if (arguments.length === 1) { + while (++i < n) if (d3_numeric(a = +array[i])) s += a; + } else { + while (++i < n) if (d3_numeric(a = +f.call(array, array[i], i))) s += a; + } + return s; + }; + d3.mean = function(array, f) { + var s = 0, n = array.length, a, i = -1, j = n; + if (arguments.length === 1) { + while (++i < n) if (d3_numeric(a = d3_number(array[i]))) s += a; else --j; + } else { + while (++i < n) if (d3_numeric(a = d3_number(f.call(array, array[i], i)))) s += a; else --j; + } + if (j) return s / j; + }; + d3.quantile = function(values, p) { + var H = (values.length - 1) * p + 1, h = Math.floor(H), v = +values[h - 1], e = H - h; + return e ? v + e * (values[h] - v) : v; + }; + d3.median = function(array, f) { + var numbers = [], n = array.length, a, i = -1; + if (arguments.length === 1) { + while (++i < n) if (d3_numeric(a = d3_number(array[i]))) numbers.push(a); + } else { + while (++i < n) if (d3_numeric(a = d3_number(f.call(array, array[i], i)))) numbers.push(a); + } + if (numbers.length) return d3.quantile(numbers.sort(d3_ascending), .5); + }; + d3.variance = function(array, f) { + var n = array.length, m = 0, a, d, s = 0, i = -1, j = 0; + if (arguments.length === 1) { + while (++i < n) { + if (d3_numeric(a = d3_number(array[i]))) { + d = a - m; + m += d / ++j; + s += d * (a - m); + } + } + } else { + while (++i < n) { + if (d3_numeric(a = d3_number(f.call(array, array[i], i)))) { + d = a - m; + m += d / ++j; + s += d * (a - m); + } + } + } + if (j > 1) return s / (j - 1); + }; + d3.deviation = function() { + var v = d3.variance.apply(this, arguments); + return v ? Math.sqrt(v) : v; + }; + function d3_bisector(compare) { + return { + left: function(a, x, lo, hi) { + if (arguments.length < 3) lo = 0; + if (arguments.length < 4) hi = a.length; + while (lo < hi) { + var mid = lo + hi >>> 1; + if (compare(a[mid], x) < 0) lo = mid + 1; else hi = mid; + } + return lo; + }, + right: function(a, x, lo, hi) { + if (arguments.length < 3) lo = 0; + if (arguments.length < 4) hi = a.length; + while (lo < hi) { + var mid = lo + hi >>> 1; + if (compare(a[mid], x) > 0) hi = mid; else lo = mid + 1; + } + return lo; + } + }; + } + var d3_bisect = d3_bisector(d3_ascending); + d3.bisectLeft = d3_bisect.left; + d3.bisect = d3.bisectRight = d3_bisect.right; + d3.bisector = function(f) { + return d3_bisector(f.length === 1 ? function(d, x) { + return d3_ascending(f(d), x); + } : f); + }; + d3.shuffle = function(array, i0, i1) { + if ((m = arguments.length) < 3) { + i1 = array.length; + if (m < 2) i0 = 0; + } + var m = i1 - i0, t, i; + while (m) { + i = Math.random() * m-- | 0; + t = array[m + i0], array[m + i0] = array[i + i0], array[i + i0] = t; + } + return array; + }; + d3.permute = function(array, indexes) { + var i = indexes.length, permutes = new Array(i); + while (i--) permutes[i] = array[indexes[i]]; + return permutes; + }; + d3.pairs = function(array) { + var i = 0, n = array.length - 1, p0, p1 = array[0], pairs = new Array(n < 0 ? 
0 : n); + while (i < n) pairs[i] = [ p0 = p1, p1 = array[++i] ]; + return pairs; + }; + d3.zip = function() { + if (!(n = arguments.length)) return []; + for (var i = -1, m = d3.min(arguments, d3_zipLength), zips = new Array(m); ++i < m; ) { + for (var j = -1, n, zip = zips[i] = new Array(n); ++j < n; ) { + zip[j] = arguments[j][i]; + } + } + return zips; + }; + function d3_zipLength(d) { + return d.length; + } + d3.transpose = function(matrix) { + return d3.zip.apply(d3, matrix); + }; + d3.keys = function(map) { + var keys = []; + for (var key in map) keys.push(key); + return keys; + }; + d3.values = function(map) { + var values = []; + for (var key in map) values.push(map[key]); + return values; + }; + d3.entries = function(map) { + var entries = []; + for (var key in map) entries.push({ + key: key, + value: map[key] + }); + return entries; + }; + d3.merge = function(arrays) { + var n = arrays.length, m, i = -1, j = 0, merged, array; + while (++i < n) j += arrays[i].length; + merged = new Array(j); + while (--n >= 0) { + array = arrays[n]; + m = array.length; + while (--m >= 0) { + merged[--j] = array[m]; + } + } + return merged; + }; + var abs = Math.abs; + d3.range = function(start, stop, step) { + if (arguments.length < 3) { + step = 1; + if (arguments.length < 2) { + stop = start; + start = 0; + } + } + if ((stop - start) / step === Infinity) throw new Error("infinite range"); + var range = [], k = d3_range_integerScale(abs(step)), i = -1, j; + start *= k, stop *= k, step *= k; + if (step < 0) while ((j = start + step * ++i) > stop) range.push(j / k); else while ((j = start + step * ++i) < stop) range.push(j / k); + return range; + }; + function d3_range_integerScale(x) { + var k = 1; + while (x * k % 1) k *= 10; + return k; + } + function d3_class(ctor, properties) { + for (var key in properties) { + Object.defineProperty(ctor.prototype, key, { + value: properties[key], + enumerable: false + }); + } + } + d3.map = function(object, f) { + var map = new d3_Map(); + if (object instanceof d3_Map) { + object.forEach(function(key, value) { + map.set(key, value); + }); + } else if (Array.isArray(object)) { + var i = -1, n = object.length, o; + if (arguments.length === 1) while (++i < n) map.set(i, object[i]); else while (++i < n) map.set(f.call(object, o = object[i], i), o); + } else { + for (var key in object) map.set(key, object[key]); + } + return map; + }; + function d3_Map() { + this._ = Object.create(null); + } + var d3_map_proto = "__proto__", d3_map_zero = "\x00"; + d3_class(d3_Map, { + has: d3_map_has, + get: function(key) { + return this._[d3_map_escape(key)]; + }, + set: function(key, value) { + return this._[d3_map_escape(key)] = value; + }, + remove: d3_map_remove, + keys: d3_map_keys, + values: function() { + var values = []; + for (var key in this._) values.push(this._[key]); + return values; + }, + entries: function() { + var entries = []; + for (var key in this._) entries.push({ + key: d3_map_unescape(key), + value: this._[key] + }); + return entries; + }, + size: d3_map_size, + empty: d3_map_empty, + forEach: function(f) { + for (var key in this._) f.call(this, d3_map_unescape(key), this._[key]); + } + }); + function d3_map_escape(key) { + return (key += "") === d3_map_proto || key[0] === d3_map_zero ? d3_map_zero + key : key; + } + function d3_map_unescape(key) { + return (key += "")[0] === d3_map_zero ? 
key.slice(1) : key; + } + function d3_map_has(key) { + return d3_map_escape(key) in this._; + } + function d3_map_remove(key) { + return (key = d3_map_escape(key)) in this._ && delete this._[key]; + } + function d3_map_keys() { + var keys = []; + for (var key in this._) keys.push(d3_map_unescape(key)); + return keys; + } + function d3_map_size() { + var size = 0; + for (var key in this._) ++size; + return size; + } + function d3_map_empty() { + for (var key in this._) return false; + return true; + } + d3.nest = function() { + var nest = {}, keys = [], sortKeys = [], sortValues, rollup; + function map(mapType, array, depth) { + if (depth >= keys.length) return rollup ? rollup.call(nest, array) : sortValues ? array.sort(sortValues) : array; + var i = -1, n = array.length, key = keys[depth++], keyValue, object, setter, valuesByKey = new d3_Map(), values; + while (++i < n) { + if (values = valuesByKey.get(keyValue = key(object = array[i]))) { + values.push(object); + } else { + valuesByKey.set(keyValue, [ object ]); + } + } + if (mapType) { + object = mapType(); + setter = function(keyValue, values) { + object.set(keyValue, map(mapType, values, depth)); + }; + } else { + object = {}; + setter = function(keyValue, values) { + object[keyValue] = map(mapType, values, depth); + }; + } + valuesByKey.forEach(setter); + return object; + } + function entries(map, depth) { + if (depth >= keys.length) return map; + var array = [], sortKey = sortKeys[depth++]; + map.forEach(function(key, keyMap) { + array.push({ + key: key, + values: entries(keyMap, depth) + }); + }); + return sortKey ? array.sort(function(a, b) { + return sortKey(a.key, b.key); + }) : array; + } + nest.map = function(array, mapType) { + return map(mapType, array, 0); + }; + nest.entries = function(array) { + return entries(map(d3.map, array, 0), 0); + }; + nest.key = function(d) { + keys.push(d); + return nest; + }; + nest.sortKeys = function(order) { + sortKeys[keys.length - 1] = order; + return nest; + }; + nest.sortValues = function(order) { + sortValues = order; + return nest; + }; + nest.rollup = function(f) { + rollup = f; + return nest; + }; + return nest; + }; + d3.set = function(array) { + var set = new d3_Set(); + if (array) for (var i = 0, n = array.length; i < n; ++i) set.add(array[i]); + return set; + }; + function d3_Set() { + this._ = Object.create(null); + } + d3_class(d3_Set, { + has: d3_map_has, + add: function(key) { + this._[d3_map_escape(key += "")] = true; + return key; + }, + remove: d3_map_remove, + values: d3_map_keys, + size: d3_map_size, + empty: d3_map_empty, + forEach: function(f) { + for (var key in this._) f.call(this, d3_map_unescape(key)); + } + }); + d3.behavior = {}; + function d3_identity(d) { + return d; + } + d3.rebind = function(target, source) { + var i = 1, n = arguments.length, method; + while (++i < n) target[method = arguments[i]] = d3_rebind(target, source, source[method]); + return target; + }; + function d3_rebind(target, source, method) { + return function() { + var value = method.apply(source, arguments); + return value === source ? 
target : value; + }; + } + function d3_vendorSymbol(object, name) { + if (name in object) return name; + name = name.charAt(0).toUpperCase() + name.slice(1); + for (var i = 0, n = d3_vendorPrefixes.length; i < n; ++i) { + var prefixName = d3_vendorPrefixes[i] + name; + if (prefixName in object) return prefixName; + } + } + var d3_vendorPrefixes = [ "webkit", "ms", "moz", "Moz", "o", "O" ]; + function d3_noop() {} + d3.dispatch = function() { + var dispatch = new d3_dispatch(), i = -1, n = arguments.length; + while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch); + return dispatch; + }; + function d3_dispatch() {} + d3_dispatch.prototype.on = function(type, listener) { + var i = type.indexOf("."), name = ""; + if (i >= 0) { + name = type.slice(i + 1); + type = type.slice(0, i); + } + if (type) return arguments.length < 2 ? this[type].on(name) : this[type].on(name, listener); + if (arguments.length === 2) { + if (listener == null) for (type in this) { + if (this.hasOwnProperty(type)) this[type].on(name, null); + } + return this; + } + }; + function d3_dispatch_event(dispatch) { + var listeners = [], listenerByName = new d3_Map(); + function event() { + var z = listeners, i = -1, n = z.length, l; + while (++i < n) if (l = z[i].on) l.apply(this, arguments); + return dispatch; + } + event.on = function(name, listener) { + var l = listenerByName.get(name), i; + if (arguments.length < 2) return l && l.on; + if (l) { + l.on = null; + listeners = listeners.slice(0, i = listeners.indexOf(l)).concat(listeners.slice(i + 1)); + listenerByName.remove(name); + } + if (listener) listeners.push(listenerByName.set(name, { + on: listener + })); + return dispatch; + }; + return event; + } + d3.event = null; + function d3_eventPreventDefault() { + d3.event.preventDefault(); + } + function d3_eventSource() { + var e = d3.event, s; + while (s = e.sourceEvent) e = s; + return e; + } + function d3_eventDispatch(target) { + var dispatch = new d3_dispatch(), i = 0, n = arguments.length; + while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch); + dispatch.of = function(thiz, argumentz) { + return function(e1) { + try { + var e0 = e1.sourceEvent = d3.event; + e1.target = target; + d3.event = e1; + dispatch[e1.type].apply(thiz, argumentz); + } finally { + d3.event = e0; + } + }; + }; + return dispatch; + } + d3.requote = function(s) { + return s.replace(d3_requote_re, "\\$&"); + }; + var d3_requote_re = /[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g; + var d3_subclass = {}.__proto__ ? 
function(object, prototype) { + object.__proto__ = prototype; + } : function(object, prototype) { + for (var property in prototype) object[property] = prototype[property]; + }; + function d3_selection(groups) { + d3_subclass(groups, d3_selectionPrototype); + return groups; + } + var d3_select = function(s, n) { + return n.querySelector(s); + }, d3_selectAll = function(s, n) { + return n.querySelectorAll(s); + }, d3_selectMatches = function(n, s) { + var d3_selectMatcher = n.matches || n[d3_vendorSymbol(n, "matchesSelector")]; + d3_selectMatches = function(n, s) { + return d3_selectMatcher.call(n, s); + }; + return d3_selectMatches(n, s); + }; + if (typeof Sizzle === "function") { + d3_select = function(s, n) { + return Sizzle(s, n)[0] || null; + }; + d3_selectAll = Sizzle; + d3_selectMatches = Sizzle.matchesSelector; + } + d3.selection = function() { + return d3.select(d3_document.documentElement); + }; + var d3_selectionPrototype = d3.selection.prototype = []; + d3_selectionPrototype.select = function(selector) { + var subgroups = [], subgroup, subnode, group, node; + selector = d3_selection_selector(selector); + for (var j = -1, m = this.length; ++j < m; ) { + subgroups.push(subgroup = []); + subgroup.parentNode = (group = this[j]).parentNode; + for (var i = -1, n = group.length; ++i < n; ) { + if (node = group[i]) { + subgroup.push(subnode = selector.call(node, node.__data__, i, j)); + if (subnode && "__data__" in node) subnode.__data__ = node.__data__; + } else { + subgroup.push(null); + } + } + } + return d3_selection(subgroups); + }; + function d3_selection_selector(selector) { + return typeof selector === "function" ? selector : function() { + return d3_select(selector, this); + }; + } + d3_selectionPrototype.selectAll = function(selector) { + var subgroups = [], subgroup, node; + selector = d3_selection_selectorAll(selector); + for (var j = -1, m = this.length; ++j < m; ) { + for (var group = this[j], i = -1, n = group.length; ++i < n; ) { + if (node = group[i]) { + subgroups.push(subgroup = d3_array(selector.call(node, node.__data__, i, j))); + subgroup.parentNode = node; + } + } + } + return d3_selection(subgroups); + }; + function d3_selection_selectorAll(selector) { + return typeof selector === "function" ? selector : function() { + return d3_selectAll(selector, this); + }; + } + var d3_nsPrefix = { + svg: "http://www.w3.org/2000/svg", + xhtml: "http://www.w3.org/1999/xhtml", + xlink: "http://www.w3.org/1999/xlink", + xml: "http://www.w3.org/XML/1998/namespace", + xmlns: "http://www.w3.org/2000/xmlns/" + }; + d3.ns = { + prefix: d3_nsPrefix, + qualify: function(name) { + var i = name.indexOf(":"), prefix = name; + if (i >= 0) { + prefix = name.slice(0, i); + name = name.slice(i + 1); + } + return d3_nsPrefix.hasOwnProperty(prefix) ? { + space: d3_nsPrefix[prefix], + local: name + } : name; + } + }; + d3_selectionPrototype.attr = function(name, value) { + if (arguments.length < 2) { + if (typeof name === "string") { + var node = this.node(); + name = d3.ns.qualify(name); + return name.local ? 
node.getAttributeNS(name.space, name.local) : node.getAttribute(name); + } + for (value in name) this.each(d3_selection_attr(value, name[value])); + return this; + } + return this.each(d3_selection_attr(name, value)); + }; + function d3_selection_attr(name, value) { + name = d3.ns.qualify(name); + function attrNull() { + this.removeAttribute(name); + } + function attrNullNS() { + this.removeAttributeNS(name.space, name.local); + } + function attrConstant() { + this.setAttribute(name, value); + } + function attrConstantNS() { + this.setAttributeNS(name.space, name.local, value); + } + function attrFunction() { + var x = value.apply(this, arguments); + if (x == null) this.removeAttribute(name); else this.setAttribute(name, x); + } + function attrFunctionNS() { + var x = value.apply(this, arguments); + if (x == null) this.removeAttributeNS(name.space, name.local); else this.setAttributeNS(name.space, name.local, x); + } + return value == null ? name.local ? attrNullNS : attrNull : typeof value === "function" ? name.local ? attrFunctionNS : attrFunction : name.local ? attrConstantNS : attrConstant; + } + function d3_collapse(s) { + return s.trim().replace(/\s+/g, " "); + } + d3_selectionPrototype.classed = function(name, value) { + if (arguments.length < 2) { + if (typeof name === "string") { + var node = this.node(), n = (name = d3_selection_classes(name)).length, i = -1; + if (value = node.classList) { + while (++i < n) if (!value.contains(name[i])) return false; + } else { + value = node.getAttribute("class"); + while (++i < n) if (!d3_selection_classedRe(name[i]).test(value)) return false; + } + return true; + } + for (value in name) this.each(d3_selection_classed(value, name[value])); + return this; + } + return this.each(d3_selection_classed(name, value)); + }; + function d3_selection_classedRe(name) { + return new RegExp("(?:^|\\s+)" + d3.requote(name) + "(?:\\s+|$)", "g"); + } + function d3_selection_classes(name) { + return (name + "").trim().split(/^|\s+/); + } + function d3_selection_classed(name, value) { + name = d3_selection_classes(name).map(d3_selection_classedName); + var n = name.length; + function classedConstant() { + var i = -1; + while (++i < n) name[i](this, value); + } + function classedFunction() { + var i = -1, x = value.apply(this, arguments); + while (++i < n) name[i](this, x); + } + return typeof value === "function" ? classedFunction : classedConstant; + } + function d3_selection_classedName(name) { + var re = d3_selection_classedRe(name); + return function(node, value) { + if (c = node.classList) return value ? 
c.add(name) : c.remove(name); + var c = node.getAttribute("class") || ""; + if (value) { + re.lastIndex = 0; + if (!re.test(c)) node.setAttribute("class", d3_collapse(c + " " + name)); + } else { + node.setAttribute("class", d3_collapse(c.replace(re, " "))); + } + }; + } + d3_selectionPrototype.style = function(name, value, priority) { + var n = arguments.length; + if (n < 3) { + if (typeof name !== "string") { + if (n < 2) value = ""; + for (priority in name) this.each(d3_selection_style(priority, name[priority], value)); + return this; + } + if (n < 2) { + var node = this.node(); + return d3_window(node).getComputedStyle(node, null).getPropertyValue(name); + } + priority = ""; + } + return this.each(d3_selection_style(name, value, priority)); + }; + function d3_selection_style(name, value, priority) { + function styleNull() { + this.style.removeProperty(name); + } + function styleConstant() { + this.style.setProperty(name, value, priority); + } + function styleFunction() { + var x = value.apply(this, arguments); + if (x == null) this.style.removeProperty(name); else this.style.setProperty(name, x, priority); + } + return value == null ? styleNull : typeof value === "function" ? styleFunction : styleConstant; + } + d3_selectionPrototype.property = function(name, value) { + if (arguments.length < 2) { + if (typeof name === "string") return this.node()[name]; + for (value in name) this.each(d3_selection_property(value, name[value])); + return this; + } + return this.each(d3_selection_property(name, value)); + }; + function d3_selection_property(name, value) { + function propertyNull() { + delete this[name]; + } + function propertyConstant() { + this[name] = value; + } + function propertyFunction() { + var x = value.apply(this, arguments); + if (x == null) delete this[name]; else this[name] = x; + } + return value == null ? propertyNull : typeof value === "function" ? propertyFunction : propertyConstant; + } + d3_selectionPrototype.text = function(value) { + return arguments.length ? this.each(typeof value === "function" ? function() { + var v = value.apply(this, arguments); + this.textContent = v == null ? "" : v; + } : value == null ? function() { + this.textContent = ""; + } : function() { + this.textContent = value; + }) : this.node().textContent; + }; + d3_selectionPrototype.html = function(value) { + return arguments.length ? this.each(typeof value === "function" ? function() { + var v = value.apply(this, arguments); + this.innerHTML = v == null ? "" : v; + } : value == null ? function() { + this.innerHTML = ""; + } : function() { + this.innerHTML = value; + }) : this.node().innerHTML; + }; + d3_selectionPrototype.append = function(name) { + name = d3_selection_creator(name); + return this.select(function() { + return this.appendChild(name.apply(this, arguments)); + }); + }; + function d3_selection_creator(name) { + function create() { + var document = this.ownerDocument, namespace = this.namespaceURI; + return namespace ? document.createElementNS(namespace, name) : document.createElement(name); + } + function createNS() { + return this.ownerDocument.createElementNS(name.space, name.local); + } + return typeof name === "function" ? name : (name = d3.ns.qualify(name)).local ? 
createNS : create; + } + d3_selectionPrototype.insert = function(name, before) { + name = d3_selection_creator(name); + before = d3_selection_selector(before); + return this.select(function() { + return this.insertBefore(name.apply(this, arguments), before.apply(this, arguments) || null); + }); + }; + d3_selectionPrototype.remove = function() { + return this.each(d3_selectionRemove); + }; + function d3_selectionRemove() { + var parent = this.parentNode; + if (parent) parent.removeChild(this); + } + d3_selectionPrototype.data = function(value, key) { + var i = -1, n = this.length, group, node; + if (!arguments.length) { + value = new Array(n = (group = this[0]).length); + while (++i < n) { + if (node = group[i]) { + value[i] = node.__data__; + } + } + return value; + } + function bind(group, groupData) { + var i, n = group.length, m = groupData.length, n0 = Math.min(n, m), updateNodes = new Array(m), enterNodes = new Array(m), exitNodes = new Array(n), node, nodeData; + if (key) { + var nodeByKeyValue = new d3_Map(), keyValues = new Array(n), keyValue; + for (i = -1; ++i < n; ) { + if (nodeByKeyValue.has(keyValue = key.call(node = group[i], node.__data__, i))) { + exitNodes[i] = node; + } else { + nodeByKeyValue.set(keyValue, node); + } + keyValues[i] = keyValue; + } + for (i = -1; ++i < m; ) { + if (!(node = nodeByKeyValue.get(keyValue = key.call(groupData, nodeData = groupData[i], i)))) { + enterNodes[i] = d3_selection_dataNode(nodeData); + } else if (node !== true) { + updateNodes[i] = node; + node.__data__ = nodeData; + } + nodeByKeyValue.set(keyValue, true); + } + for (i = -1; ++i < n; ) { + if (nodeByKeyValue.get(keyValues[i]) !== true) { + exitNodes[i] = group[i]; + } + } + } else { + for (i = -1; ++i < n0; ) { + node = group[i]; + nodeData = groupData[i]; + if (node) { + node.__data__ = nodeData; + updateNodes[i] = node; + } else { + enterNodes[i] = d3_selection_dataNode(nodeData); + } + } + for (;i < m; ++i) { + enterNodes[i] = d3_selection_dataNode(groupData[i]); + } + for (;i < n; ++i) { + exitNodes[i] = group[i]; + } + } + enterNodes.update = updateNodes; + enterNodes.parentNode = updateNodes.parentNode = exitNodes.parentNode = group.parentNode; + enter.push(enterNodes); + update.push(updateNodes); + exit.push(exitNodes); + } + var enter = d3_selection_enter([]), update = d3_selection([]), exit = d3_selection([]); + if (typeof value === "function") { + while (++i < n) { + bind(group = this[i], value.call(group, group.parentNode.__data__, i)); + } + } else { + while (++i < n) { + bind(group = this[i], value); + } + } + update.enter = function() { + return enter; + }; + update.exit = function() { + return exit; + }; + return update; + }; + function d3_selection_dataNode(data) { + return { + __data__: data + }; + } + d3_selectionPrototype.datum = function(value) { + return arguments.length ? 
this.property("__data__", value) : this.property("__data__"); + }; + d3_selectionPrototype.filter = function(filter) { + var subgroups = [], subgroup, group, node; + if (typeof filter !== "function") filter = d3_selection_filter(filter); + for (var j = 0, m = this.length; j < m; j++) { + subgroups.push(subgroup = []); + subgroup.parentNode = (group = this[j]).parentNode; + for (var i = 0, n = group.length; i < n; i++) { + if ((node = group[i]) && filter.call(node, node.__data__, i, j)) { + subgroup.push(node); + } + } + } + return d3_selection(subgroups); + }; + function d3_selection_filter(selector) { + return function() { + return d3_selectMatches(this, selector); + }; + } + d3_selectionPrototype.order = function() { + for (var j = -1, m = this.length; ++j < m; ) { + for (var group = this[j], i = group.length - 1, next = group[i], node; --i >= 0; ) { + if (node = group[i]) { + if (next && next !== node.nextSibling) next.parentNode.insertBefore(node, next); + next = node; + } + } + } + return this; + }; + d3_selectionPrototype.sort = function(comparator) { + comparator = d3_selection_sortComparator.apply(this, arguments); + for (var j = -1, m = this.length; ++j < m; ) this[j].sort(comparator); + return this.order(); + }; + function d3_selection_sortComparator(comparator) { + if (!arguments.length) comparator = d3_ascending; + return function(a, b) { + return a && b ? comparator(a.__data__, b.__data__) : !a - !b; + }; + } + d3_selectionPrototype.each = function(callback) { + return d3_selection_each(this, function(node, i, j) { + callback.call(node, node.__data__, i, j); + }); + }; + function d3_selection_each(groups, callback) { + for (var j = 0, m = groups.length; j < m; j++) { + for (var group = groups[j], i = 0, n = group.length, node; i < n; i++) { + if (node = group[i]) callback(node, i, j); + } + } + return groups; + } + d3_selectionPrototype.call = function(callback) { + var args = d3_array(arguments); + callback.apply(args[0] = this, args); + return this; + }; + d3_selectionPrototype.empty = function() { + return !this.node(); + }; + d3_selectionPrototype.node = function() { + for (var j = 0, m = this.length; j < m; j++) { + for (var group = this[j], i = 0, n = group.length; i < n; i++) { + var node = group[i]; + if (node) return node; + } + } + return null; + }; + d3_selectionPrototype.size = function() { + var n = 0; + d3_selection_each(this, function() { + ++n; + }); + return n; + }; + function d3_selection_enter(selection) { + d3_subclass(selection, d3_selection_enterPrototype); + return selection; + } + var d3_selection_enterPrototype = []; + d3.selection.enter = d3_selection_enter; + d3.selection.enter.prototype = d3_selection_enterPrototype; + d3_selection_enterPrototype.append = d3_selectionPrototype.append; + d3_selection_enterPrototype.empty = d3_selectionPrototype.empty; + d3_selection_enterPrototype.node = d3_selectionPrototype.node; + d3_selection_enterPrototype.call = d3_selectionPrototype.call; + d3_selection_enterPrototype.size = d3_selectionPrototype.size; + d3_selection_enterPrototype.select = function(selector) { + var subgroups = [], subgroup, subnode, upgroup, group, node; + for (var j = -1, m = this.length; ++j < m; ) { + upgroup = (group = this[j]).update; + subgroups.push(subgroup = []); + subgroup.parentNode = group.parentNode; + for (var i = -1, n = group.length; ++i < n; ) { + if (node = group[i]) { + subgroup.push(upgroup[i] = subnode = selector.call(group.parentNode, node.__data__, i, j)); + subnode.__data__ = node.__data__; + } else { + 
subgroup.push(null); + } + } + } + return d3_selection(subgroups); + }; + d3_selection_enterPrototype.insert = function(name, before) { + if (arguments.length < 2) before = d3_selection_enterInsertBefore(this); + return d3_selectionPrototype.insert.call(this, name, before); + }; + function d3_selection_enterInsertBefore(enter) { + var i0, j0; + return function(d, i, j) { + var group = enter[j].update, n = group.length, node; + if (j != j0) j0 = j, i0 = 0; + if (i >= i0) i0 = i + 1; + while (!(node = group[i0]) && ++i0 < n) ; + return node; + }; + } + d3.select = function(node) { + var group; + if (typeof node === "string") { + group = [ d3_select(node, d3_document) ]; + group.parentNode = d3_document.documentElement; + } else { + group = [ node ]; + group.parentNode = d3_documentElement(node); + } + return d3_selection([ group ]); + }; + d3.selectAll = function(nodes) { + var group; + if (typeof nodes === "string") { + group = d3_array(d3_selectAll(nodes, d3_document)); + group.parentNode = d3_document.documentElement; + } else { + group = nodes; + group.parentNode = null; + } + return d3_selection([ group ]); + }; + d3_selectionPrototype.on = function(type, listener, capture) { + var n = arguments.length; + if (n < 3) { + if (typeof type !== "string") { + if (n < 2) listener = false; + for (capture in type) this.each(d3_selection_on(capture, type[capture], listener)); + return this; + } + if (n < 2) return (n = this.node()["__on" + type]) && n._; + capture = false; + } + return this.each(d3_selection_on(type, listener, capture)); + }; + function d3_selection_on(type, listener, capture) { + var name = "__on" + type, i = type.indexOf("."), wrap = d3_selection_onListener; + if (i > 0) type = type.slice(0, i); + var filter = d3_selection_onFilters.get(type); + if (filter) type = filter, wrap = d3_selection_onFilter; + function onRemove() { + var l = this[name]; + if (l) { + this.removeEventListener(type, l, l.$); + delete this[name]; + } + } + function onAdd() { + var l = wrap(listener, d3_array(arguments)); + onRemove.call(this); + this.addEventListener(type, this[name] = l, l.$ = capture); + l._ = listener; + } + function removeAll() { + var re = new RegExp("^__on([^.]+)" + d3.requote(type) + "$"), match; + for (var name in this) { + if (match = name.match(re)) { + var l = this[name]; + this.removeEventListener(match[1], l, l.$); + delete this[name]; + } + } + } + return i ? listener ? onAdd : onRemove : listener ? 
d3_noop : removeAll; + } + var d3_selection_onFilters = d3.map({ + mouseenter: "mouseover", + mouseleave: "mouseout" + }); + if (d3_document) { + d3_selection_onFilters.forEach(function(k) { + if ("on" + k in d3_document) d3_selection_onFilters.remove(k); + }); + } + function d3_selection_onListener(listener, argumentz) { + return function(e) { + var o = d3.event; + d3.event = e; + argumentz[0] = this.__data__; + try { + listener.apply(this, argumentz); + } finally { + d3.event = o; + } + }; + } + function d3_selection_onFilter(listener, argumentz) { + var l = d3_selection_onListener(listener, argumentz); + return function(e) { + var target = this, related = e.relatedTarget; + if (!related || related !== target && !(related.compareDocumentPosition(target) & 8)) { + l.call(target, e); + } + }; + } + var d3_event_dragSelect, d3_event_dragId = 0; + function d3_event_dragSuppress(node) { + var name = ".dragsuppress-" + ++d3_event_dragId, click = "click" + name, w = d3.select(d3_window(node)).on("touchmove" + name, d3_eventPreventDefault).on("dragstart" + name, d3_eventPreventDefault).on("selectstart" + name, d3_eventPreventDefault); + if (d3_event_dragSelect == null) { + d3_event_dragSelect = "onselectstart" in node ? false : d3_vendorSymbol(node.style, "userSelect"); + } + if (d3_event_dragSelect) { + var style = d3_documentElement(node).style, select = style[d3_event_dragSelect]; + style[d3_event_dragSelect] = "none"; + } + return function(suppressClick) { + w.on(name, null); + if (d3_event_dragSelect) style[d3_event_dragSelect] = select; + if (suppressClick) { + var off = function() { + w.on(click, null); + }; + w.on(click, function() { + d3_eventPreventDefault(); + off(); + }, true); + setTimeout(off, 0); + } + }; + } + d3.mouse = function(container) { + return d3_mousePoint(container, d3_eventSource()); + }; + var d3_mouse_bug44083 = this.navigator && /WebKit/.test(this.navigator.userAgent) ? 
-1 : 0; + function d3_mousePoint(container, e) { + if (e.changedTouches) e = e.changedTouches[0]; + var svg = container.ownerSVGElement || container; + if (svg.createSVGPoint) { + var point = svg.createSVGPoint(); + if (d3_mouse_bug44083 < 0) { + var window = d3_window(container); + if (window.scrollX || window.scrollY) { + svg = d3.select("body").append("svg").style({ + position: "absolute", + top: 0, + left: 0, + margin: 0, + padding: 0, + border: "none" + }, "important"); + var ctm = svg[0][0].getScreenCTM(); + d3_mouse_bug44083 = !(ctm.f || ctm.e); + svg.remove(); + } + } + if (d3_mouse_bug44083) point.x = e.pageX, point.y = e.pageY; else point.x = e.clientX, + point.y = e.clientY; + point = point.matrixTransform(container.getScreenCTM().inverse()); + return [ point.x, point.y ]; + } + var rect = container.getBoundingClientRect(); + return [ e.clientX - rect.left - container.clientLeft, e.clientY - rect.top - container.clientTop ]; + } + d3.touch = function(container, touches, identifier) { + if (arguments.length < 3) identifier = touches, touches = d3_eventSource().changedTouches; + if (touches) for (var i = 0, n = touches.length, touch; i < n; ++i) { + if ((touch = touches[i]).identifier === identifier) { + return d3_mousePoint(container, touch); + } + } + }; + d3.behavior.drag = function() { + var event = d3_eventDispatch(drag, "drag", "dragstart", "dragend"), origin = null, mousedown = dragstart(d3_noop, d3.mouse, d3_window, "mousemove", "mouseup"), touchstart = dragstart(d3_behavior_dragTouchId, d3.touch, d3_identity, "touchmove", "touchend"); + function drag() { + this.on("mousedown.drag", mousedown).on("touchstart.drag", touchstart); + } + function dragstart(id, position, subject, move, end) { + return function() { + var that = this, target = d3.event.target, parent = that.parentNode, dispatch = event.of(that, arguments), dragged = 0, dragId = id(), dragName = ".drag" + (dragId == null ? "" : "-" + dragId), dragOffset, dragSubject = d3.select(subject(target)).on(move + dragName, moved).on(end + dragName, ended), dragRestore = d3_event_dragSuppress(target), position0 = position(parent, dragId); + if (origin) { + dragOffset = origin.apply(that, arguments); + dragOffset = [ dragOffset.x - position0[0], dragOffset.y - position0[1] ]; + } else { + dragOffset = [ 0, 0 ]; + } + dispatch({ + type: "dragstart" + }); + function moved() { + var position1 = position(parent, dragId), dx, dy; + if (!position1) return; + dx = position1[0] - position0[0]; + dy = position1[1] - position0[1]; + dragged |= dx | dy; + position0 = position1; + dispatch({ + type: "drag", + x: position1[0] + dragOffset[0], + y: position1[1] + dragOffset[1], + dx: dx, + dy: dy + }); + } + function ended() { + if (!position(parent, dragId)) return; + dragSubject.on(move + dragName, null).on(end + dragName, null); + dragRestore(dragged && d3.event.target === target); + dispatch({ + type: "dragend" + }); + } + }; + } + drag.origin = function(x) { + if (!arguments.length) return origin; + origin = x; + return drag; + }; + return d3.rebind(drag, event, "on"); + }; + function d3_behavior_dragTouchId() { + return d3.event.changedTouches[0].identifier; + } + d3.touches = function(container, touches) { + if (arguments.length < 2) touches = d3_eventSource().touches; + return touches ? 
d3_array(touches).map(function(touch) { + var point = d3_mousePoint(container, touch); + point.identifier = touch.identifier; + return point; + }) : []; + }; + var ε = 1e-6, ε2 = ε * ε, π = Math.PI, τ = 2 * π, τε = τ - ε, halfπ = π / 2, d3_radians = π / 180, d3_degrees = 180 / π; + function d3_sgn(x) { + return x > 0 ? 1 : x < 0 ? -1 : 0; + } + function d3_cross2d(a, b, c) { + return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]); + } + function d3_acos(x) { + return x > 1 ? 0 : x < -1 ? π : Math.acos(x); + } + function d3_asin(x) { + return x > 1 ? halfπ : x < -1 ? -halfπ : Math.asin(x); + } + function d3_sinh(x) { + return ((x = Math.exp(x)) - 1 / x) / 2; + } + function d3_cosh(x) { + return ((x = Math.exp(x)) + 1 / x) / 2; + } + function d3_tanh(x) { + return ((x = Math.exp(2 * x)) - 1) / (x + 1); + } + function d3_haversin(x) { + return (x = Math.sin(x / 2)) * x; + } + var ρ = Math.SQRT2, ρ2 = 2, ρ4 = 4; + d3.interpolateZoom = function(p0, p1) { + var ux0 = p0[0], uy0 = p0[1], w0 = p0[2], ux1 = p1[0], uy1 = p1[1], w1 = p1[2]; + var dx = ux1 - ux0, dy = uy1 - uy0, d2 = dx * dx + dy * dy, d1 = Math.sqrt(d2), b0 = (w1 * w1 - w0 * w0 + ρ4 * d2) / (2 * w0 * ρ2 * d1), b1 = (w1 * w1 - w0 * w0 - ρ4 * d2) / (2 * w1 * ρ2 * d1), r0 = Math.log(Math.sqrt(b0 * b0 + 1) - b0), r1 = Math.log(Math.sqrt(b1 * b1 + 1) - b1), dr = r1 - r0, S = (dr || Math.log(w1 / w0)) / ρ; + function interpolate(t) { + var s = t * S; + if (dr) { + var coshr0 = d3_cosh(r0), u = w0 / (ρ2 * d1) * (coshr0 * d3_tanh(ρ * s + r0) - d3_sinh(r0)); + return [ ux0 + u * dx, uy0 + u * dy, w0 * coshr0 / d3_cosh(ρ * s + r0) ]; + } + return [ ux0 + t * dx, uy0 + t * dy, w0 * Math.exp(ρ * s) ]; + } + interpolate.duration = S * 1e3; + return interpolate; + }; + d3.behavior.zoom = function() { + var view = { + x: 0, + y: 0, + k: 1 + }, translate0, center0, center, size = [ 960, 500 ], scaleExtent = d3_behavior_zoomInfinity, duration = 250, zooming = 0, mousedown = "mousedown.zoom", mousemove = "mousemove.zoom", mouseup = "mouseup.zoom", mousewheelTimer, touchstart = "touchstart.zoom", touchtime, event = d3_eventDispatch(zoom, "zoomstart", "zoom", "zoomend"), x0, x1, y0, y1; + if (!d3_behavior_zoomWheel) { + d3_behavior_zoomWheel = "onwheel" in d3_document ? (d3_behavior_zoomDelta = function() { + return -d3.event.deltaY * (d3.event.deltaMode ? 120 : 1); + }, "wheel") : "onmousewheel" in d3_document ? (d3_behavior_zoomDelta = function() { + return d3.event.wheelDelta; + }, "mousewheel") : (d3_behavior_zoomDelta = function() { + return -d3.event.detail; + }, "MozMousePixelScroll"); + } + function zoom(g) { + g.on(mousedown, mousedowned).on(d3_behavior_zoomWheel + ".zoom", mousewheeled).on("dblclick.zoom", dblclicked).on(touchstart, touchstarted); + } + zoom.event = function(g) { + g.each(function() { + var dispatch = event.of(this, arguments), view1 = view; + if (d3_transitionInheritId) { + d3.select(this).transition().each("start.zoom", function() { + view = this.__chart__ || { + x: 0, + y: 0, + k: 1 + }; + zoomstarted(dispatch); + }).tween("zoom:zoom", function() { + var dx = size[0], dy = size[1], cx = center0 ? center0[0] : dx / 2, cy = center0 ? 
center0[1] : dy / 2, i = d3.interpolateZoom([ (cx - view.x) / view.k, (cy - view.y) / view.k, dx / view.k ], [ (cx - view1.x) / view1.k, (cy - view1.y) / view1.k, dx / view1.k ]); + return function(t) { + var l = i(t), k = dx / l[2]; + this.__chart__ = view = { + x: cx - l[0] * k, + y: cy - l[1] * k, + k: k + }; + zoomed(dispatch); + }; + }).each("interrupt.zoom", function() { + zoomended(dispatch); + }).each("end.zoom", function() { + zoomended(dispatch); + }); + } else { + this.__chart__ = view; + zoomstarted(dispatch); + zoomed(dispatch); + zoomended(dispatch); + } + }); + }; + zoom.translate = function(_) { + if (!arguments.length) return [ view.x, view.y ]; + view = { + x: +_[0], + y: +_[1], + k: view.k + }; + rescale(); + return zoom; + }; + zoom.scale = function(_) { + if (!arguments.length) return view.k; + view = { + x: view.x, + y: view.y, + k: +_ + }; + rescale(); + return zoom; + }; + zoom.scaleExtent = function(_) { + if (!arguments.length) return scaleExtent; + scaleExtent = _ == null ? d3_behavior_zoomInfinity : [ +_[0], +_[1] ]; + return zoom; + }; + zoom.center = function(_) { + if (!arguments.length) return center; + center = _ && [ +_[0], +_[1] ]; + return zoom; + }; + zoom.size = function(_) { + if (!arguments.length) return size; + size = _ && [ +_[0], +_[1] ]; + return zoom; + }; + zoom.duration = function(_) { + if (!arguments.length) return duration; + duration = +_; + return zoom; + }; + zoom.x = function(z) { + if (!arguments.length) return x1; + x1 = z; + x0 = z.copy(); + view = { + x: 0, + y: 0, + k: 1 + }; + return zoom; + }; + zoom.y = function(z) { + if (!arguments.length) return y1; + y1 = z; + y0 = z.copy(); + view = { + x: 0, + y: 0, + k: 1 + }; + return zoom; + }; + function location(p) { + return [ (p[0] - view.x) / view.k, (p[1] - view.y) / view.k ]; + } + function point(l) { + return [ l[0] * view.k + view.x, l[1] * view.k + view.y ]; + } + function scaleTo(s) { + view.k = Math.max(scaleExtent[0], Math.min(scaleExtent[1], s)); + } + function translateTo(p, l) { + l = point(l); + view.x += p[0] - l[0]; + view.y += p[1] - l[1]; + } + function zoomTo(that, p, l, k) { + that.__chart__ = { + x: view.x, + y: view.y, + k: view.k + }; + scaleTo(Math.pow(2, k)); + translateTo(center0 = p, l); + that = d3.select(that); + if (duration > 0) that = that.transition().duration(duration); + that.call(zoom.event); + } + function rescale() { + if (x1) x1.domain(x0.range().map(function(x) { + return (x - view.x) / view.k; + }).map(x0.invert)); + if (y1) y1.domain(y0.range().map(function(y) { + return (y - view.y) / view.k; + }).map(y0.invert)); + } + function zoomstarted(dispatch) { + if (!zooming++) dispatch({ + type: "zoomstart" + }); + } + function zoomed(dispatch) { + rescale(); + dispatch({ + type: "zoom", + scale: view.k, + translate: [ view.x, view.y ] + }); + } + function zoomended(dispatch) { + if (!--zooming) dispatch({ + type: "zoomend" + }), center0 = null; + } + function mousedowned() { + var that = this, target = d3.event.target, dispatch = event.of(that, arguments), dragged = 0, subject = d3.select(d3_window(that)).on(mousemove, moved).on(mouseup, ended), location0 = location(d3.mouse(that)), dragRestore = d3_event_dragSuppress(that); + d3_selection_interrupt.call(that); + zoomstarted(dispatch); + function moved() { + dragged = 1; + translateTo(d3.mouse(that), location0); + zoomed(dispatch); + } + function ended() { + subject.on(mousemove, null).on(mouseup, null); + dragRestore(dragged && d3.event.target === target); + zoomended(dispatch); + } + } + 
function touchstarted() { + var that = this, dispatch = event.of(that, arguments), locations0 = {}, distance0 = 0, scale0, zoomName = ".zoom-" + d3.event.changedTouches[0].identifier, touchmove = "touchmove" + zoomName, touchend = "touchend" + zoomName, targets = [], subject = d3.select(that), dragRestore = d3_event_dragSuppress(that); + started(); + zoomstarted(dispatch); + subject.on(mousedown, null).on(touchstart, started); + function relocate() { + var touches = d3.touches(that); + scale0 = view.k; + touches.forEach(function(t) { + if (t.identifier in locations0) locations0[t.identifier] = location(t); + }); + return touches; + } + function started() { + var target = d3.event.target; + d3.select(target).on(touchmove, moved).on(touchend, ended); + targets.push(target); + var changed = d3.event.changedTouches; + for (var i = 0, n = changed.length; i < n; ++i) { + locations0[changed[i].identifier] = null; + } + var touches = relocate(), now = Date.now(); + if (touches.length === 1) { + if (now - touchtime < 500) { + var p = touches[0]; + zoomTo(that, p, locations0[p.identifier], Math.floor(Math.log(view.k) / Math.LN2) + 1); + d3_eventPreventDefault(); + } + touchtime = now; + } else if (touches.length > 1) { + var p = touches[0], q = touches[1], dx = p[0] - q[0], dy = p[1] - q[1]; + distance0 = dx * dx + dy * dy; + } + } + function moved() { + var touches = d3.touches(that), p0, l0, p1, l1; + d3_selection_interrupt.call(that); + for (var i = 0, n = touches.length; i < n; ++i, l1 = null) { + p1 = touches[i]; + if (l1 = locations0[p1.identifier]) { + if (l0) break; + p0 = p1, l0 = l1; + } + } + if (l1) { + var distance1 = (distance1 = p1[0] - p0[0]) * distance1 + (distance1 = p1[1] - p0[1]) * distance1, scale1 = distance0 && Math.sqrt(distance1 / distance0); + p0 = [ (p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2 ]; + l0 = [ (l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2 ]; + scaleTo(scale1 * scale0); + } + touchtime = null; + translateTo(p0, l0); + zoomed(dispatch); + } + function ended() { + if (d3.event.touches.length) { + var changed = d3.event.changedTouches; + for (var i = 0, n = changed.length; i < n; ++i) { + delete locations0[changed[i].identifier]; + } + for (var identifier in locations0) { + return void relocate(); + } + } + d3.selectAll(targets).on(zoomName, null); + subject.on(mousedown, mousedowned).on(touchstart, touchstarted); + dragRestore(); + zoomended(dispatch); + } + } + function mousewheeled() { + var dispatch = event.of(this, arguments); + if (mousewheelTimer) clearTimeout(mousewheelTimer); else d3_selection_interrupt.call(this), + translate0 = location(center0 = center || d3.mouse(this)), zoomstarted(dispatch); + mousewheelTimer = setTimeout(function() { + mousewheelTimer = null; + zoomended(dispatch); + }, 50); + d3_eventPreventDefault(); + scaleTo(Math.pow(2, d3_behavior_zoomDelta() * .002) * view.k); + translateTo(center0, translate0); + zoomed(dispatch); + } + function dblclicked() { + var p = d3.mouse(this), k = Math.log(view.k) / Math.LN2; + zoomTo(this, p, location(p), d3.event.shiftKey ? Math.ceil(k) - 1 : Math.floor(k) + 1); + } + return d3.rebind(zoom, event, "on"); + }; + var d3_behavior_zoomInfinity = [ 0, Infinity ], d3_behavior_zoomDelta, d3_behavior_zoomWheel; + d3.color = d3_color; + function d3_color() {} + d3_color.prototype.toString = function() { + return this.rgb() + ""; + }; + d3.hsl = d3_hsl; + function d3_hsl(h, s, l) { + return this instanceof d3_hsl ? void (this.h = +h, this.s = +s, this.l = +l) : arguments.length < 2 ? h instanceof d3_hsl ? 
new d3_hsl(h.h, h.s, h.l) : d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl) : new d3_hsl(h, s, l); + } + var d3_hslPrototype = d3_hsl.prototype = new d3_color(); + d3_hslPrototype.brighter = function(k) { + k = Math.pow(.7, arguments.length ? k : 1); + return new d3_hsl(this.h, this.s, this.l / k); + }; + d3_hslPrototype.darker = function(k) { + k = Math.pow(.7, arguments.length ? k : 1); + return new d3_hsl(this.h, this.s, k * this.l); + }; + d3_hslPrototype.rgb = function() { + return d3_hsl_rgb(this.h, this.s, this.l); + }; + function d3_hsl_rgb(h, s, l) { + var m1, m2; + h = isNaN(h) ? 0 : (h %= 360) < 0 ? h + 360 : h; + s = isNaN(s) ? 0 : s < 0 ? 0 : s > 1 ? 1 : s; + l = l < 0 ? 0 : l > 1 ? 1 : l; + m2 = l <= .5 ? l * (1 + s) : l + s - l * s; + m1 = 2 * l - m2; + function v(h) { + if (h > 360) h -= 360; else if (h < 0) h += 360; + if (h < 60) return m1 + (m2 - m1) * h / 60; + if (h < 180) return m2; + if (h < 240) return m1 + (m2 - m1) * (240 - h) / 60; + return m1; + } + function vv(h) { + return Math.round(v(h) * 255); + } + return new d3_rgb(vv(h + 120), vv(h), vv(h - 120)); + } + d3.hcl = d3_hcl; + function d3_hcl(h, c, l) { + return this instanceof d3_hcl ? void (this.h = +h, this.c = +c, this.l = +l) : arguments.length < 2 ? h instanceof d3_hcl ? new d3_hcl(h.h, h.c, h.l) : h instanceof d3_lab ? d3_lab_hcl(h.l, h.a, h.b) : d3_lab_hcl((h = d3_rgb_lab((h = d3.rgb(h)).r, h.g, h.b)).l, h.a, h.b) : new d3_hcl(h, c, l); + } + var d3_hclPrototype = d3_hcl.prototype = new d3_color(); + d3_hclPrototype.brighter = function(k) { + return new d3_hcl(this.h, this.c, Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1))); + }; + d3_hclPrototype.darker = function(k) { + return new d3_hcl(this.h, this.c, Math.max(0, this.l - d3_lab_K * (arguments.length ? k : 1))); + }; + d3_hclPrototype.rgb = function() { + return d3_hcl_lab(this.h, this.c, this.l).rgb(); + }; + function d3_hcl_lab(h, c, l) { + if (isNaN(h)) h = 0; + if (isNaN(c)) c = 0; + return new d3_lab(l, Math.cos(h *= d3_radians) * c, Math.sin(h) * c); + } + d3.lab = d3_lab; + function d3_lab(l, a, b) { + return this instanceof d3_lab ? void (this.l = +l, this.a = +a, this.b = +b) : arguments.length < 2 ? l instanceof d3_lab ? new d3_lab(l.l, l.a, l.b) : l instanceof d3_hcl ? d3_hcl_lab(l.h, l.c, l.l) : d3_rgb_lab((l = d3_rgb(l)).r, l.g, l.b) : new d3_lab(l, a, b); + } + var d3_lab_K = 18; + var d3_lab_X = .95047, d3_lab_Y = 1, d3_lab_Z = 1.08883; + var d3_labPrototype = d3_lab.prototype = new d3_color(); + d3_labPrototype.brighter = function(k) { + return new d3_lab(Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1)), this.a, this.b); + }; + d3_labPrototype.darker = function(k) { + return new d3_lab(Math.max(0, this.l - d3_lab_K * (arguments.length ? k : 1)), this.a, this.b); + }; + d3_labPrototype.rgb = function() { + return d3_lab_rgb(this.l, this.a, this.b); + }; + function d3_lab_rgb(l, a, b) { + var y = (l + 16) / 116, x = y + a / 500, z = y - b / 200; + x = d3_lab_xyz(x) * d3_lab_X; + y = d3_lab_xyz(y) * d3_lab_Y; + z = d3_lab_xyz(z) * d3_lab_Z; + return new d3_rgb(d3_xyz_rgb(3.2404542 * x - 1.5371385 * y - .4985314 * z), d3_xyz_rgb(-.969266 * x + 1.8760108 * y + .041556 * z), d3_xyz_rgb(.0556434 * x - .2040259 * y + 1.0572252 * z)); + } + function d3_lab_hcl(l, a, b) { + return l > 0 ? new d3_hcl(Math.atan2(b, a) * d3_degrees, Math.sqrt(a * a + b * b), l) : new d3_hcl(NaN, NaN, l); + } + function d3_lab_xyz(x) { + return x > .206893034 ? 
x * x * x : (x - 4 / 29) / 7.787037; + } + function d3_xyz_lab(x) { + return x > .008856 ? Math.pow(x, 1 / 3) : 7.787037 * x + 4 / 29; + } + function d3_xyz_rgb(r) { + return Math.round(255 * (r <= .00304 ? 12.92 * r : 1.055 * Math.pow(r, 1 / 2.4) - .055)); + } + d3.rgb = d3_rgb; + function d3_rgb(r, g, b) { + return this instanceof d3_rgb ? void (this.r = ~~r, this.g = ~~g, this.b = ~~b) : arguments.length < 2 ? r instanceof d3_rgb ? new d3_rgb(r.r, r.g, r.b) : d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb) : new d3_rgb(r, g, b); + } + function d3_rgbNumber(value) { + return new d3_rgb(value >> 16, value >> 8 & 255, value & 255); + } + function d3_rgbString(value) { + return d3_rgbNumber(value) + ""; + } + var d3_rgbPrototype = d3_rgb.prototype = new d3_color(); + d3_rgbPrototype.brighter = function(k) { + k = Math.pow(.7, arguments.length ? k : 1); + var r = this.r, g = this.g, b = this.b, i = 30; + if (!r && !g && !b) return new d3_rgb(i, i, i); + if (r && r < i) r = i; + if (g && g < i) g = i; + if (b && b < i) b = i; + return new d3_rgb(Math.min(255, r / k), Math.min(255, g / k), Math.min(255, b / k)); + }; + d3_rgbPrototype.darker = function(k) { + k = Math.pow(.7, arguments.length ? k : 1); + return new d3_rgb(k * this.r, k * this.g, k * this.b); + }; + d3_rgbPrototype.hsl = function() { + return d3_rgb_hsl(this.r, this.g, this.b); + }; + d3_rgbPrototype.toString = function() { + return "#" + d3_rgb_hex(this.r) + d3_rgb_hex(this.g) + d3_rgb_hex(this.b); + }; + function d3_rgb_hex(v) { + return v < 16 ? "0" + Math.max(0, v).toString(16) : Math.min(255, v).toString(16); + } + function d3_rgb_parse(format, rgb, hsl) { + var r = 0, g = 0, b = 0, m1, m2, color; + m1 = /([a-z]+)\((.*)\)/.exec(format = format.toLowerCase()); + if (m1) { + m2 = m1[2].split(","); + switch (m1[1]) { + case "hsl": + { + return hsl(parseFloat(m2[0]), parseFloat(m2[1]) / 100, parseFloat(m2[2]) / 100); + } + + case "rgb": + { + return rgb(d3_rgb_parseNumber(m2[0]), d3_rgb_parseNumber(m2[1]), d3_rgb_parseNumber(m2[2])); + } + } + } + if (color = d3_rgb_names.get(format)) { + return rgb(color.r, color.g, color.b); + } + if (format != null && format.charAt(0) === "#" && !isNaN(color = parseInt(format.slice(1), 16))) { + if (format.length === 4) { + r = (color & 3840) >> 4; + r = r >> 4 | r; + g = color & 240; + g = g >> 4 | g; + b = color & 15; + b = b << 4 | b; + } else if (format.length === 7) { + r = (color & 16711680) >> 16; + g = (color & 65280) >> 8; + b = color & 255; + } + } + return rgb(r, g, b); + } + function d3_rgb_hsl(r, g, b) { + var min = Math.min(r /= 255, g /= 255, b /= 255), max = Math.max(r, g, b), d = max - min, h, s, l = (max + min) / 2; + if (d) { + s = l < .5 ? d / (max + min) : d / (2 - max - min); + if (r == max) h = (g - b) / d + (g < b ? 6 : 0); else if (g == max) h = (b - r) / d + 2; else h = (r - g) / d + 4; + h *= 60; + } else { + h = NaN; + s = l > 0 && l < 1 ? 0 : h; + } + return new d3_hsl(h, s, l); + } + function d3_rgb_lab(r, g, b) { + r = d3_rgb_xyz(r); + g = d3_rgb_xyz(g); + b = d3_rgb_xyz(b); + var x = d3_xyz_lab((.4124564 * r + .3575761 * g + .1804375 * b) / d3_lab_X), y = d3_xyz_lab((.2126729 * r + .7151522 * g + .072175 * b) / d3_lab_Y), z = d3_xyz_lab((.0193339 * r + .119192 * g + .9503041 * b) / d3_lab_Z); + return d3_lab(116 * y - 16, 500 * (x - y), 200 * (y - z)); + } + function d3_rgb_xyz(r) { + return (r /= 255) <= .04045 ? r / 12.92 : Math.pow((r + .055) / 1.055, 2.4); + } + function d3_rgb_parseNumber(c) { + var f = parseFloat(c); + return c.charAt(c.length - 1) === "%" ? 
Math.round(f * 2.55) : f; + } + var d3_rgb_names = d3.map({ + aliceblue: 15792383, + antiquewhite: 16444375, + aqua: 65535, + aquamarine: 8388564, + azure: 15794175, + beige: 16119260, + bisque: 16770244, + black: 0, + blanchedalmond: 16772045, + blue: 255, + blueviolet: 9055202, + brown: 10824234, + burlywood: 14596231, + cadetblue: 6266528, + chartreuse: 8388352, + chocolate: 13789470, + coral: 16744272, + cornflowerblue: 6591981, + cornsilk: 16775388, + crimson: 14423100, + cyan: 65535, + darkblue: 139, + darkcyan: 35723, + darkgoldenrod: 12092939, + darkgray: 11119017, + darkgreen: 25600, + darkgrey: 11119017, + darkkhaki: 12433259, + darkmagenta: 9109643, + darkolivegreen: 5597999, + darkorange: 16747520, + darkorchid: 10040012, + darkred: 9109504, + darksalmon: 15308410, + darkseagreen: 9419919, + darkslateblue: 4734347, + darkslategray: 3100495, + darkslategrey: 3100495, + darkturquoise: 52945, + darkviolet: 9699539, + deeppink: 16716947, + deepskyblue: 49151, + dimgray: 6908265, + dimgrey: 6908265, + dodgerblue: 2003199, + firebrick: 11674146, + floralwhite: 16775920, + forestgreen: 2263842, + fuchsia: 16711935, + gainsboro: 14474460, + ghostwhite: 16316671, + gold: 16766720, + goldenrod: 14329120, + gray: 8421504, + green: 32768, + greenyellow: 11403055, + grey: 8421504, + honeydew: 15794160, + hotpink: 16738740, + indianred: 13458524, + indigo: 4915330, + ivory: 16777200, + khaki: 15787660, + lavender: 15132410, + lavenderblush: 16773365, + lawngreen: 8190976, + lemonchiffon: 16775885, + lightblue: 11393254, + lightcoral: 15761536, + lightcyan: 14745599, + lightgoldenrodyellow: 16448210, + lightgray: 13882323, + lightgreen: 9498256, + lightgrey: 13882323, + lightpink: 16758465, + lightsalmon: 16752762, + lightseagreen: 2142890, + lightskyblue: 8900346, + lightslategray: 7833753, + lightslategrey: 7833753, + lightsteelblue: 11584734, + lightyellow: 16777184, + lime: 65280, + limegreen: 3329330, + linen: 16445670, + magenta: 16711935, + maroon: 8388608, + mediumaquamarine: 6737322, + mediumblue: 205, + mediumorchid: 12211667, + mediumpurple: 9662683, + mediumseagreen: 3978097, + mediumslateblue: 8087790, + mediumspringgreen: 64154, + mediumturquoise: 4772300, + mediumvioletred: 13047173, + midnightblue: 1644912, + mintcream: 16121850, + mistyrose: 16770273, + moccasin: 16770229, + navajowhite: 16768685, + navy: 128, + oldlace: 16643558, + olive: 8421376, + olivedrab: 7048739, + orange: 16753920, + orangered: 16729344, + orchid: 14315734, + palegoldenrod: 15657130, + palegreen: 10025880, + paleturquoise: 11529966, + palevioletred: 14381203, + papayawhip: 16773077, + peachpuff: 16767673, + peru: 13468991, + pink: 16761035, + plum: 14524637, + powderblue: 11591910, + purple: 8388736, + rebeccapurple: 6697881, + red: 16711680, + rosybrown: 12357519, + royalblue: 4286945, + saddlebrown: 9127187, + salmon: 16416882, + sandybrown: 16032864, + seagreen: 3050327, + seashell: 16774638, + sienna: 10506797, + silver: 12632256, + skyblue: 8900331, + slateblue: 6970061, + slategray: 7372944, + slategrey: 7372944, + snow: 16775930, + springgreen: 65407, + steelblue: 4620980, + tan: 13808780, + teal: 32896, + thistle: 14204888, + tomato: 16737095, + turquoise: 4251856, + violet: 15631086, + wheat: 16113331, + white: 16777215, + whitesmoke: 16119285, + yellow: 16776960, + yellowgreen: 10145074 + }); + d3_rgb_names.forEach(function(key, value) { + d3_rgb_names.set(key, d3_rgbNumber(value)); + }); + function d3_functor(v) { + return typeof v === "function" ? 
v : function() { + return v; + }; + } + d3.functor = d3_functor; + d3.xhr = d3_xhrType(d3_identity); + function d3_xhrType(response) { + return function(url, mimeType, callback) { + if (arguments.length === 2 && typeof mimeType === "function") callback = mimeType, + mimeType = null; + return d3_xhr(url, mimeType, response, callback); + }; + } + function d3_xhr(url, mimeType, response, callback) { + var xhr = {}, dispatch = d3.dispatch("beforesend", "progress", "load", "error"), headers = {}, request = new XMLHttpRequest(), responseType = null; + if (this.XDomainRequest && !("withCredentials" in request) && /^(http(s)?:)?\/\//.test(url)) request = new XDomainRequest(); + "onload" in request ? request.onload = request.onerror = respond : request.onreadystatechange = function() { + request.readyState > 3 && respond(); + }; + function respond() { + var status = request.status, result; + if (!status && d3_xhrHasResponse(request) || status >= 200 && status < 300 || status === 304) { + try { + result = response.call(xhr, request); + } catch (e) { + dispatch.error.call(xhr, e); + return; + } + dispatch.load.call(xhr, result); + } else { + dispatch.error.call(xhr, request); + } + } + request.onprogress = function(event) { + var o = d3.event; + d3.event = event; + try { + dispatch.progress.call(xhr, request); + } finally { + d3.event = o; + } + }; + xhr.header = function(name, value) { + name = (name + "").toLowerCase(); + if (arguments.length < 2) return headers[name]; + if (value == null) delete headers[name]; else headers[name] = value + ""; + return xhr; + }; + xhr.mimeType = function(value) { + if (!arguments.length) return mimeType; + mimeType = value == null ? null : value + ""; + return xhr; + }; + xhr.responseType = function(value) { + if (!arguments.length) return responseType; + responseType = value; + return xhr; + }; + xhr.response = function(value) { + response = value; + return xhr; + }; + [ "get", "post" ].forEach(function(method) { + xhr[method] = function() { + return xhr.send.apply(xhr, [ method ].concat(d3_array(arguments))); + }; + }); + xhr.send = function(method, data, callback) { + if (arguments.length === 2 && typeof data === "function") callback = data, data = null; + request.open(method, url, true); + if (mimeType != null && !("accept" in headers)) headers["accept"] = mimeType + ",*/*"; + if (request.setRequestHeader) for (var name in headers) request.setRequestHeader(name, headers[name]); + if (mimeType != null && request.overrideMimeType) request.overrideMimeType(mimeType); + if (responseType != null) request.responseType = responseType; + if (callback != null) xhr.on("error", callback).on("load", function(request) { + callback(null, request); + }); + dispatch.beforesend.call(xhr, request); + request.send(data == null ? null : data); + return xhr; + }; + xhr.abort = function() { + request.abort(); + return xhr; + }; + d3.rebind(xhr, dispatch, "on"); + return callback == null ? xhr : xhr.get(d3_xhr_fixCallback(callback)); + } + function d3_xhr_fixCallback(callback) { + return callback.length === 1 ? function(error, request) { + callback(error == null ? request : null); + } : callback; + } + function d3_xhrHasResponse(request) { + var type = request.responseType; + return type && type !== "text" ? 
request.response : request.responseText; + } + d3.dsv = function(delimiter, mimeType) { + var reFormat = new RegExp('["' + delimiter + "\n]"), delimiterCode = delimiter.charCodeAt(0); + function dsv(url, row, callback) { + if (arguments.length < 3) callback = row, row = null; + var xhr = d3_xhr(url, mimeType, row == null ? response : typedResponse(row), callback); + xhr.row = function(_) { + return arguments.length ? xhr.response((row = _) == null ? response : typedResponse(_)) : row; + }; + return xhr; + } + function response(request) { + return dsv.parse(request.responseText); + } + function typedResponse(f) { + return function(request) { + return dsv.parse(request.responseText, f); + }; + } + dsv.parse = function(text, f) { + var o; + return dsv.parseRows(text, function(row, i) { + if (o) return o(row, i - 1); + var a = new Function("d", "return {" + row.map(function(name, i) { + return JSON.stringify(name) + ": d[" + i + "]"; + }).join(",") + "}"); + o = f ? function(row, i) { + return f(a(row), i); + } : a; + }); + }; + dsv.parseRows = function(text, f) { + var EOL = {}, EOF = {}, rows = [], N = text.length, I = 0, n = 0, t, eol; + function token() { + if (I >= N) return EOF; + if (eol) return eol = false, EOL; + var j = I; + if (text.charCodeAt(j) === 34) { + var i = j; + while (i++ < N) { + if (text.charCodeAt(i) === 34) { + if (text.charCodeAt(i + 1) !== 34) break; + ++i; + } + } + I = i + 2; + var c = text.charCodeAt(i + 1); + if (c === 13) { + eol = true; + if (text.charCodeAt(i + 2) === 10) ++I; + } else if (c === 10) { + eol = true; + } + return text.slice(j + 1, i).replace(/""/g, '"'); + } + while (I < N) { + var c = text.charCodeAt(I++), k = 1; + if (c === 10) eol = true; else if (c === 13) { + eol = true; + if (text.charCodeAt(I) === 10) ++I, ++k; + } else if (c !== delimiterCode) continue; + return text.slice(j, I - k); + } + return text.slice(j); + } + while ((t = token()) !== EOF) { + var a = []; + while (t !== EOL && t !== EOF) { + a.push(t); + t = token(); + } + if (f && (a = f(a, n++)) == null) continue; + rows.push(a); + } + return rows; + }; + dsv.format = function(rows) { + if (Array.isArray(rows[0])) return dsv.formatRows(rows); + var fieldSet = new d3_Set(), fields = []; + rows.forEach(function(row) { + for (var field in row) { + if (!fieldSet.has(field)) { + fields.push(fieldSet.add(field)); + } + } + }); + return [ fields.map(formatValue).join(delimiter) ].concat(rows.map(function(row) { + return fields.map(function(field) { + return formatValue(row[field]); + }).join(delimiter); + })).join("\n"); + }; + dsv.formatRows = function(rows) { + return rows.map(formatRow).join("\n"); + }; + function formatRow(row) { + return row.map(formatValue).join(delimiter); + } + function formatValue(text) { + return reFormat.test(text) ? 
'"' + text.replace(/\"/g, '""') + '"' : text; + } + return dsv; + }; + d3.csv = d3.dsv(",", "text/csv"); + d3.tsv = d3.dsv(" ", "text/tab-separated-values"); + var d3_timer_queueHead, d3_timer_queueTail, d3_timer_interval, d3_timer_timeout, d3_timer_active, d3_timer_frame = this[d3_vendorSymbol(this, "requestAnimationFrame")] || function(callback) { + setTimeout(callback, 17); + }; + d3.timer = function(callback, delay, then) { + var n = arguments.length; + if (n < 2) delay = 0; + if (n < 3) then = Date.now(); + var time = then + delay, timer = { + c: callback, + t: time, + f: false, + n: null + }; + if (d3_timer_queueTail) d3_timer_queueTail.n = timer; else d3_timer_queueHead = timer; + d3_timer_queueTail = timer; + if (!d3_timer_interval) { + d3_timer_timeout = clearTimeout(d3_timer_timeout); + d3_timer_interval = 1; + d3_timer_frame(d3_timer_step); + } + }; + function d3_timer_step() { + var now = d3_timer_mark(), delay = d3_timer_sweep() - now; + if (delay > 24) { + if (isFinite(delay)) { + clearTimeout(d3_timer_timeout); + d3_timer_timeout = setTimeout(d3_timer_step, delay); + } + d3_timer_interval = 0; + } else { + d3_timer_interval = 1; + d3_timer_frame(d3_timer_step); + } + } + d3.timer.flush = function() { + d3_timer_mark(); + d3_timer_sweep(); + }; + function d3_timer_mark() { + var now = Date.now(); + d3_timer_active = d3_timer_queueHead; + while (d3_timer_active) { + if (now >= d3_timer_active.t) d3_timer_active.f = d3_timer_active.c(now - d3_timer_active.t); + d3_timer_active = d3_timer_active.n; + } + return now; + } + function d3_timer_sweep() { + var t0, t1 = d3_timer_queueHead, time = Infinity; + while (t1) { + if (t1.f) { + t1 = t0 ? t0.n = t1.n : d3_timer_queueHead = t1.n; + } else { + if (t1.t < time) time = t1.t; + t1 = (t0 = t1).n; + } + } + d3_timer_queueTail = t0; + return time; + } + function d3_format_precision(x, p) { + return p - (x ? Math.ceil(Math.log(x) / Math.LN10) : 1); + } + d3.round = function(x, n) { + return n ? Math.round(x * (n = Math.pow(10, n))) / n : Math.round(x); + }; + var d3_formatPrefixes = [ "y", "z", "a", "f", "p", "n", "µ", "m", "", "k", "M", "G", "T", "P", "E", "Z", "Y" ].map(d3_formatPrefix); + d3.formatPrefix = function(value, precision) { + var i = 0; + if (value) { + if (value < 0) value *= -1; + if (precision) value = d3.round(value, d3_format_precision(value, precision)); + i = 1 + Math.floor(1e-12 + Math.log(value) / Math.LN10); + i = Math.max(-24, Math.min(24, Math.floor((i - 1) / 3) * 3)); + } + return d3_formatPrefixes[8 + i / 3]; + }; + function d3_formatPrefix(d, i) { + var k = Math.pow(10, abs(8 - i) * 3); + return { + scale: i > 8 ? function(d) { + return d / k; + } : function(d) { + return d * k; + }, + symbol: d + }; + } + function d3_locale_numberFormat(locale) { + var locale_decimal = locale.decimal, locale_thousands = locale.thousands, locale_grouping = locale.grouping, locale_currency = locale.currency, formatGroup = locale_grouping && locale_thousands ? 
function(value, width) { + var i = value.length, t = [], j = 0, g = locale_grouping[0], length = 0; + while (i > 0 && g > 0) { + if (length + g + 1 > width) g = Math.max(1, width - length); + t.push(value.substring(i -= g, i + g)); + if ((length += g + 1) > width) break; + g = locale_grouping[j = (j + 1) % locale_grouping.length]; + } + return t.reverse().join(locale_thousands); + } : d3_identity; + return function(specifier) { + var match = d3_format_re.exec(specifier), fill = match[1] || " ", align = match[2] || ">", sign = match[3] || "-", symbol = match[4] || "", zfill = match[5], width = +match[6], comma = match[7], precision = match[8], type = match[9], scale = 1, prefix = "", suffix = "", integer = false, exponent = true; + if (precision) precision = +precision.substring(1); + if (zfill || fill === "0" && align === "=") { + zfill = fill = "0"; + align = "="; + } + switch (type) { + case "n": + comma = true; + type = "g"; + break; + + case "%": + scale = 100; + suffix = "%"; + type = "f"; + break; + + case "p": + scale = 100; + suffix = "%"; + type = "r"; + break; + + case "b": + case "o": + case "x": + case "X": + if (symbol === "#") prefix = "0" + type.toLowerCase(); + + case "c": + exponent = false; + + case "d": + integer = true; + precision = 0; + break; + + case "s": + scale = -1; + type = "r"; + break; + } + if (symbol === "$") prefix = locale_currency[0], suffix = locale_currency[1]; + if (type == "r" && !precision) type = "g"; + if (precision != null) { + if (type == "g") precision = Math.max(1, Math.min(21, precision)); else if (type == "e" || type == "f") precision = Math.max(0, Math.min(20, precision)); + } + type = d3_format_types.get(type) || d3_format_typeDefault; + var zcomma = zfill && comma; + return function(value) { + var fullSuffix = suffix; + if (integer && value % 1) return ""; + var negative = value < 0 || value === 0 && 1 / value < 0 ? (value = -value, "-") : sign === "-" ? "" : sign; + if (scale < 0) { + var unit = d3.formatPrefix(value, precision); + value = unit.scale(value); + fullSuffix = unit.symbol + suffix; + } else { + value *= scale; + } + value = type(value, precision); + var i = value.lastIndexOf("."), before, after; + if (i < 0) { + var j = exponent ? value.lastIndexOf("e") : -1; + if (j < 0) before = value, after = ""; else before = value.substring(0, j), after = value.substring(j); + } else { + before = value.substring(0, i); + after = locale_decimal + value.substring(i + 1); + } + if (!zfill && comma) before = formatGroup(before, Infinity); + var length = prefix.length + before.length + after.length + (zcomma ? 0 : negative.length), padding = length < width ? new Array(length = width - length + 1).join(fill) : ""; + if (zcomma) before = formatGroup(padding + before, padding.length ? width - after.length : Infinity); + negative += prefix; + value = before + after; + return (align === "<" ? negative + value + padding : align === ">" ? padding + negative + value : align === "^" ? padding.substring(0, length >>= 1) + negative + value + padding.substring(length) : negative + (zcomma ? 
value : padding + value)) + fullSuffix; + }; + }; + } + var d3_format_re = /(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i; + var d3_format_types = d3.map({ + b: function(x) { + return x.toString(2); + }, + c: function(x) { + return String.fromCharCode(x); + }, + o: function(x) { + return x.toString(8); + }, + x: function(x) { + return x.toString(16); + }, + X: function(x) { + return x.toString(16).toUpperCase(); + }, + g: function(x, p) { + return x.toPrecision(p); + }, + e: function(x, p) { + return x.toExponential(p); + }, + f: function(x, p) { + return x.toFixed(p); + }, + r: function(x, p) { + return (x = d3.round(x, d3_format_precision(x, p))).toFixed(Math.max(0, Math.min(20, d3_format_precision(x * (1 + 1e-15), p)))); + } + }); + function d3_format_typeDefault(x) { + return x + ""; + } + var d3_time = d3.time = {}, d3_date = Date; + function d3_date_utc() { + this._ = new Date(arguments.length > 1 ? Date.UTC.apply(this, arguments) : arguments[0]); + } + d3_date_utc.prototype = { + getDate: function() { + return this._.getUTCDate(); + }, + getDay: function() { + return this._.getUTCDay(); + }, + getFullYear: function() { + return this._.getUTCFullYear(); + }, + getHours: function() { + return this._.getUTCHours(); + }, + getMilliseconds: function() { + return this._.getUTCMilliseconds(); + }, + getMinutes: function() { + return this._.getUTCMinutes(); + }, + getMonth: function() { + return this._.getUTCMonth(); + }, + getSeconds: function() { + return this._.getUTCSeconds(); + }, + getTime: function() { + return this._.getTime(); + }, + getTimezoneOffset: function() { + return 0; + }, + valueOf: function() { + return this._.valueOf(); + }, + setDate: function() { + d3_time_prototype.setUTCDate.apply(this._, arguments); + }, + setDay: function() { + d3_time_prototype.setUTCDay.apply(this._, arguments); + }, + setFullYear: function() { + d3_time_prototype.setUTCFullYear.apply(this._, arguments); + }, + setHours: function() { + d3_time_prototype.setUTCHours.apply(this._, arguments); + }, + setMilliseconds: function() { + d3_time_prototype.setUTCMilliseconds.apply(this._, arguments); + }, + setMinutes: function() { + d3_time_prototype.setUTCMinutes.apply(this._, arguments); + }, + setMonth: function() { + d3_time_prototype.setUTCMonth.apply(this._, arguments); + }, + setSeconds: function() { + d3_time_prototype.setUTCSeconds.apply(this._, arguments); + }, + setTime: function() { + d3_time_prototype.setTime.apply(this._, arguments); + } + }; + var d3_time_prototype = Date.prototype; + function d3_time_interval(local, step, number) { + function round(date) { + var d0 = local(date), d1 = offset(d0, 1); + return date - d0 < d1 - date ? 
d0 : d1; + } + function ceil(date) { + step(date = local(new d3_date(date - 1)), 1); + return date; + } + function offset(date, k) { + step(date = new d3_date(+date), k); + return date; + } + function range(t0, t1, dt) { + var time = ceil(t0), times = []; + if (dt > 1) { + while (time < t1) { + if (!(number(time) % dt)) times.push(new Date(+time)); + step(time, 1); + } + } else { + while (time < t1) times.push(new Date(+time)), step(time, 1); + } + return times; + } + function range_utc(t0, t1, dt) { + try { + d3_date = d3_date_utc; + var utc = new d3_date_utc(); + utc._ = t0; + return range(utc, t1, dt); + } finally { + d3_date = Date; + } + } + local.floor = local; + local.round = round; + local.ceil = ceil; + local.offset = offset; + local.range = range; + var utc = local.utc = d3_time_interval_utc(local); + utc.floor = utc; + utc.round = d3_time_interval_utc(round); + utc.ceil = d3_time_interval_utc(ceil); + utc.offset = d3_time_interval_utc(offset); + utc.range = range_utc; + return local; + } + function d3_time_interval_utc(method) { + return function(date, k) { + try { + d3_date = d3_date_utc; + var utc = new d3_date_utc(); + utc._ = date; + return method(utc, k)._; + } finally { + d3_date = Date; + } + }; + } + d3_time.year = d3_time_interval(function(date) { + date = d3_time.day(date); + date.setMonth(0, 1); + return date; + }, function(date, offset) { + date.setFullYear(date.getFullYear() + offset); + }, function(date) { + return date.getFullYear(); + }); + d3_time.years = d3_time.year.range; + d3_time.years.utc = d3_time.year.utc.range; + d3_time.day = d3_time_interval(function(date) { + var day = new d3_date(2e3, 0); + day.setFullYear(date.getFullYear(), date.getMonth(), date.getDate()); + return day; + }, function(date, offset) { + date.setDate(date.getDate() + offset); + }, function(date) { + return date.getDate() - 1; + }); + d3_time.days = d3_time.day.range; + d3_time.days.utc = d3_time.day.utc.range; + d3_time.dayOfYear = function(date) { + var year = d3_time.year(date); + return Math.floor((date - year - (date.getTimezoneOffset() - year.getTimezoneOffset()) * 6e4) / 864e5); + }; + [ "sunday", "monday", "tuesday", "wednesday", "thursday", "friday", "saturday" ].forEach(function(day, i) { + i = 7 - i; + var interval = d3_time[day] = d3_time_interval(function(date) { + (date = d3_time.day(date)).setDate(date.getDate() - (date.getDay() + i) % 7); + return date; + }, function(date, offset) { + date.setDate(date.getDate() + Math.floor(offset) * 7); + }, function(date) { + var day = d3_time.year(date).getDay(); + return Math.floor((d3_time.dayOfYear(date) + (day + i) % 7) / 7) - (day !== i); + }); + d3_time[day + "s"] = interval.range; + d3_time[day + "s"].utc = interval.utc.range; + d3_time[day + "OfYear"] = function(date) { + var day = d3_time.year(date).getDay(); + return Math.floor((d3_time.dayOfYear(date) + (day + i) % 7) / 7); + }; + }); + d3_time.week = d3_time.sunday; + d3_time.weeks = d3_time.sunday.range; + d3_time.weeks.utc = d3_time.sunday.utc.range; + d3_time.weekOfYear = d3_time.sundayOfYear; + function d3_locale_timeFormat(locale) { + var locale_dateTime = locale.dateTime, locale_date = locale.date, locale_time = locale.time, locale_periods = locale.periods, locale_days = locale.days, locale_shortDays = locale.shortDays, locale_months = locale.months, locale_shortMonths = locale.shortMonths; + function d3_time_format(template) { + var n = template.length; + function format(date) { + var string = [], i = -1, j = 0, c, p, f; + while (++i < n) { + if 
(template.charCodeAt(i) === 37) { + string.push(template.slice(j, i)); + if ((p = d3_time_formatPads[c = template.charAt(++i)]) != null) c = template.charAt(++i); + if (f = d3_time_formats[c]) c = f(date, p == null ? c === "e" ? " " : "0" : p); + string.push(c); + j = i + 1; + } + } + string.push(template.slice(j, i)); + return string.join(""); + } + format.parse = function(string) { + var d = { + y: 1900, + m: 0, + d: 1, + H: 0, + M: 0, + S: 0, + L: 0, + Z: null + }, i = d3_time_parse(d, template, string, 0); + if (i != string.length) return null; + if ("p" in d) d.H = d.H % 12 + d.p * 12; + var localZ = d.Z != null && d3_date !== d3_date_utc, date = new (localZ ? d3_date_utc : d3_date)(); + if ("j" in d) date.setFullYear(d.y, 0, d.j); else if ("w" in d && ("W" in d || "U" in d)) { + date.setFullYear(d.y, 0, 1); + date.setFullYear(d.y, 0, "W" in d ? (d.w + 6) % 7 + d.W * 7 - (date.getDay() + 5) % 7 : d.w + d.U * 7 - (date.getDay() + 6) % 7); + } else date.setFullYear(d.y, d.m, d.d); + date.setHours(d.H + (d.Z / 100 | 0), d.M + d.Z % 100, d.S, d.L); + return localZ ? date._ : date; + }; + format.toString = function() { + return template; + }; + return format; + } + function d3_time_parse(date, template, string, j) { + var c, p, t, i = 0, n = template.length, m = string.length; + while (i < n) { + if (j >= m) return -1; + c = template.charCodeAt(i++); + if (c === 37) { + t = template.charAt(i++); + p = d3_time_parsers[t in d3_time_formatPads ? template.charAt(i++) : t]; + if (!p || (j = p(date, string, j)) < 0) return -1; + } else if (c != string.charCodeAt(j++)) { + return -1; + } + } + return j; + } + d3_time_format.utc = function(template) { + var local = d3_time_format(template); + function format(date) { + try { + d3_date = d3_date_utc; + var utc = new d3_date(); + utc._ = date; + return local(utc); + } finally { + d3_date = Date; + } + } + format.parse = function(string) { + try { + d3_date = d3_date_utc; + var date = local.parse(string); + return date && date._; + } finally { + d3_date = Date; + } + }; + format.toString = local.toString; + return format; + }; + d3_time_format.multi = d3_time_format.utc.multi = d3_time_formatMulti; + var d3_time_periodLookup = d3.map(), d3_time_dayRe = d3_time_formatRe(locale_days), d3_time_dayLookup = d3_time_formatLookup(locale_days), d3_time_dayAbbrevRe = d3_time_formatRe(locale_shortDays), d3_time_dayAbbrevLookup = d3_time_formatLookup(locale_shortDays), d3_time_monthRe = d3_time_formatRe(locale_months), d3_time_monthLookup = d3_time_formatLookup(locale_months), d3_time_monthAbbrevRe = d3_time_formatRe(locale_shortMonths), d3_time_monthAbbrevLookup = d3_time_formatLookup(locale_shortMonths); + locale_periods.forEach(function(p, i) { + d3_time_periodLookup.set(p.toLowerCase(), i); + }); + var d3_time_formats = { + a: function(d) { + return locale_shortDays[d.getDay()]; + }, + A: function(d) { + return locale_days[d.getDay()]; + }, + b: function(d) { + return locale_shortMonths[d.getMonth()]; + }, + B: function(d) { + return locale_months[d.getMonth()]; + }, + c: d3_time_format(locale_dateTime), + d: function(d, p) { + return d3_time_formatPad(d.getDate(), p, 2); + }, + e: function(d, p) { + return d3_time_formatPad(d.getDate(), p, 2); + }, + H: function(d, p) { + return d3_time_formatPad(d.getHours(), p, 2); + }, + I: function(d, p) { + return d3_time_formatPad(d.getHours() % 12 || 12, p, 2); + }, + j: function(d, p) { + return d3_time_formatPad(1 + d3_time.dayOfYear(d), p, 3); + }, + L: function(d, p) { + return 
d3_time_formatPad(d.getMilliseconds(), p, 3); + }, + m: function(d, p) { + return d3_time_formatPad(d.getMonth() + 1, p, 2); + }, + M: function(d, p) { + return d3_time_formatPad(d.getMinutes(), p, 2); + }, + p: function(d) { + return locale_periods[+(d.getHours() >= 12)]; + }, + S: function(d, p) { + return d3_time_formatPad(d.getSeconds(), p, 2); + }, + U: function(d, p) { + return d3_time_formatPad(d3_time.sundayOfYear(d), p, 2); + }, + w: function(d) { + return d.getDay(); + }, + W: function(d, p) { + return d3_time_formatPad(d3_time.mondayOfYear(d), p, 2); + }, + x: d3_time_format(locale_date), + X: d3_time_format(locale_time), + y: function(d, p) { + return d3_time_formatPad(d.getFullYear() % 100, p, 2); + }, + Y: function(d, p) { + return d3_time_formatPad(d.getFullYear() % 1e4, p, 4); + }, + Z: d3_time_zone, + "%": function() { + return "%"; + } + }; + var d3_time_parsers = { + a: d3_time_parseWeekdayAbbrev, + A: d3_time_parseWeekday, + b: d3_time_parseMonthAbbrev, + B: d3_time_parseMonth, + c: d3_time_parseLocaleFull, + d: d3_time_parseDay, + e: d3_time_parseDay, + H: d3_time_parseHour24, + I: d3_time_parseHour24, + j: d3_time_parseDayOfYear, + L: d3_time_parseMilliseconds, + m: d3_time_parseMonthNumber, + M: d3_time_parseMinutes, + p: d3_time_parseAmPm, + S: d3_time_parseSeconds, + U: d3_time_parseWeekNumberSunday, + w: d3_time_parseWeekdayNumber, + W: d3_time_parseWeekNumberMonday, + x: d3_time_parseLocaleDate, + X: d3_time_parseLocaleTime, + y: d3_time_parseYear, + Y: d3_time_parseFullYear, + Z: d3_time_parseZone, + "%": d3_time_parseLiteralPercent + }; + function d3_time_parseWeekdayAbbrev(date, string, i) { + d3_time_dayAbbrevRe.lastIndex = 0; + var n = d3_time_dayAbbrevRe.exec(string.slice(i)); + return n ? (date.w = d3_time_dayAbbrevLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; + } + function d3_time_parseWeekday(date, string, i) { + d3_time_dayRe.lastIndex = 0; + var n = d3_time_dayRe.exec(string.slice(i)); + return n ? (date.w = d3_time_dayLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; + } + function d3_time_parseMonthAbbrev(date, string, i) { + d3_time_monthAbbrevRe.lastIndex = 0; + var n = d3_time_monthAbbrevRe.exec(string.slice(i)); + return n ? (date.m = d3_time_monthAbbrevLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; + } + function d3_time_parseMonth(date, string, i) { + d3_time_monthRe.lastIndex = 0; + var n = d3_time_monthRe.exec(string.slice(i)); + return n ? (date.m = d3_time_monthLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; + } + function d3_time_parseLocaleFull(date, string, i) { + return d3_time_parse(date, d3_time_formats.c.toString(), string, i); + } + function d3_time_parseLocaleDate(date, string, i) { + return d3_time_parse(date, d3_time_formats.x.toString(), string, i); + } + function d3_time_parseLocaleTime(date, string, i) { + return d3_time_parse(date, d3_time_formats.X.toString(), string, i); + } + function d3_time_parseAmPm(date, string, i) { + var n = d3_time_periodLookup.get(string.slice(i, i += 2).toLowerCase()); + return n == null ? -1 : (date.p = n, i); + } + return d3_time_format; + } + var d3_time_formatPads = { + "-": "", + _: " ", + "0": "0" + }, d3_time_numberRe = /^\s*\d+/, d3_time_percentRe = /^%/; + function d3_time_formatPad(value, fill, width) { + var sign = value < 0 ? "-" : "", string = (sign ? -value : value) + "", length = string.length; + return sign + (length < width ? 
new Array(width - length + 1).join(fill) + string : string); + } + function d3_time_formatRe(names) { + return new RegExp("^(?:" + names.map(d3.requote).join("|") + ")", "i"); + } + function d3_time_formatLookup(names) { + var map = new d3_Map(), i = -1, n = names.length; + while (++i < n) map.set(names[i].toLowerCase(), i); + return map; + } + function d3_time_parseWeekdayNumber(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 1)); + return n ? (date.w = +n[0], i + n[0].length) : -1; + } + function d3_time_parseWeekNumberSunday(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i)); + return n ? (date.U = +n[0], i + n[0].length) : -1; + } + function d3_time_parseWeekNumberMonday(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i)); + return n ? (date.W = +n[0], i + n[0].length) : -1; + } + function d3_time_parseFullYear(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 4)); + return n ? (date.y = +n[0], i + n[0].length) : -1; + } + function d3_time_parseYear(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 2)); + return n ? (date.y = d3_time_expandYear(+n[0]), i + n[0].length) : -1; + } + function d3_time_parseZone(date, string, i) { + return /^[+-]\d{4}$/.test(string = string.slice(i, i + 5)) ? (date.Z = -string, + i + 5) : -1; + } + function d3_time_expandYear(d) { + return d + (d > 68 ? 1900 : 2e3); + } + function d3_time_parseMonthNumber(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 2)); + return n ? (date.m = n[0] - 1, i + n[0].length) : -1; + } + function d3_time_parseDay(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 2)); + return n ? (date.d = +n[0], i + n[0].length) : -1; + } + function d3_time_parseDayOfYear(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 3)); + return n ? (date.j = +n[0], i + n[0].length) : -1; + } + function d3_time_parseHour24(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 2)); + return n ? (date.H = +n[0], i + n[0].length) : -1; + } + function d3_time_parseMinutes(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 2)); + return n ? (date.M = +n[0], i + n[0].length) : -1; + } + function d3_time_parseSeconds(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 2)); + return n ? (date.S = +n[0], i + n[0].length) : -1; + } + function d3_time_parseMilliseconds(date, string, i) { + d3_time_numberRe.lastIndex = 0; + var n = d3_time_numberRe.exec(string.slice(i, i + 3)); + return n ? (date.L = +n[0], i + n[0].length) : -1; + } + function d3_time_zone(d) { + var z = d.getTimezoneOffset(), zs = z > 0 ? "-" : "+", zh = abs(z) / 60 | 0, zm = abs(z) % 60; + return zs + d3_time_formatPad(zh, "0", 2) + d3_time_formatPad(zm, "0", 2); + } + function d3_time_parseLiteralPercent(date, string, i) { + d3_time_percentRe.lastIndex = 0; + var n = d3_time_percentRe.exec(string.slice(i, i + 1)); + return n ? 
i + n[0].length : -1; + } + function d3_time_formatMulti(formats) { + var n = formats.length, i = -1; + while (++i < n) formats[i][0] = this(formats[i][0]); + return function(date) { + var i = 0, f = formats[i]; + while (!f[1](date)) f = formats[++i]; + return f[0](date); + }; + } + d3.locale = function(locale) { + return { + numberFormat: d3_locale_numberFormat(locale), + timeFormat: d3_locale_timeFormat(locale) + }; + }; + var d3_locale_enUS = d3.locale({ + decimal: ".", + thousands: ",", + grouping: [ 3 ], + currency: [ "$", "" ], + dateTime: "%a %b %e %X %Y", + date: "%m/%d/%Y", + time: "%H:%M:%S", + periods: [ "AM", "PM" ], + days: [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ], + shortDays: [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ], + months: [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ], + shortMonths: [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ] + }); + d3.format = d3_locale_enUS.numberFormat; + d3.geo = {}; + function d3_adder() {} + d3_adder.prototype = { + s: 0, + t: 0, + add: function(y) { + d3_adderSum(y, this.t, d3_adderTemp); + d3_adderSum(d3_adderTemp.s, this.s, this); + if (this.s) this.t += d3_adderTemp.t; else this.s = d3_adderTemp.t; + }, + reset: function() { + this.s = this.t = 0; + }, + valueOf: function() { + return this.s; + } + }; + var d3_adderTemp = new d3_adder(); + function d3_adderSum(a, b, o) { + var x = o.s = a + b, bv = x - a, av = x - bv; + o.t = a - av + (b - bv); + } + d3.geo.stream = function(object, listener) { + if (object && d3_geo_streamObjectType.hasOwnProperty(object.type)) { + d3_geo_streamObjectType[object.type](object, listener); + } else { + d3_geo_streamGeometry(object, listener); + } + }; + function d3_geo_streamGeometry(geometry, listener) { + if (geometry && d3_geo_streamGeometryType.hasOwnProperty(geometry.type)) { + d3_geo_streamGeometryType[geometry.type](geometry, listener); + } + } + var d3_geo_streamObjectType = { + Feature: function(feature, listener) { + d3_geo_streamGeometry(feature.geometry, listener); + }, + FeatureCollection: function(object, listener) { + var features = object.features, i = -1, n = features.length; + while (++i < n) d3_geo_streamGeometry(features[i].geometry, listener); + } + }; + var d3_geo_streamGeometryType = { + Sphere: function(object, listener) { + listener.sphere(); + }, + Point: function(object, listener) { + object = object.coordinates; + listener.point(object[0], object[1], object[2]); + }, + MultiPoint: function(object, listener) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) object = coordinates[i], listener.point(object[0], object[1], object[2]); + }, + LineString: function(object, listener) { + d3_geo_streamLine(object.coordinates, listener, 0); + }, + MultiLineString: function(object, listener) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) d3_geo_streamLine(coordinates[i], listener, 0); + }, + Polygon: function(object, listener) { + d3_geo_streamPolygon(object.coordinates, listener); + }, + MultiPolygon: function(object, listener) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) d3_geo_streamPolygon(coordinates[i], listener); + }, + GeometryCollection: function(object, listener) { + var geometries = object.geometries, i = -1, n = geometries.length; + while (++i < n) 
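/* stream each member geometry of the GeometryCollection in turn: */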
d3_geo_streamGeometry(geometries[i], listener); + } + }; + function d3_geo_streamLine(coordinates, listener, closed) { + var i = -1, n = coordinates.length - closed, coordinate; + listener.lineStart(); + while (++i < n) coordinate = coordinates[i], listener.point(coordinate[0], coordinate[1], coordinate[2]); + listener.lineEnd(); + } + function d3_geo_streamPolygon(coordinates, listener) { + var i = -1, n = coordinates.length; + listener.polygonStart(); + while (++i < n) d3_geo_streamLine(coordinates[i], listener, 1); + listener.polygonEnd(); + } + d3.geo.area = function(object) { + d3_geo_areaSum = 0; + d3.geo.stream(object, d3_geo_area); + return d3_geo_areaSum; + }; + var d3_geo_areaSum, d3_geo_areaRingSum = new d3_adder(); + var d3_geo_area = { + sphere: function() { + d3_geo_areaSum += 4 * π; + }, + point: d3_noop, + lineStart: d3_noop, + lineEnd: d3_noop, + polygonStart: function() { + d3_geo_areaRingSum.reset(); + d3_geo_area.lineStart = d3_geo_areaRingStart; + }, + polygonEnd: function() { + var area = 2 * d3_geo_areaRingSum; + d3_geo_areaSum += area < 0 ? 4 * π + area : area; + d3_geo_area.lineStart = d3_geo_area.lineEnd = d3_geo_area.point = d3_noop; + } + }; + function d3_geo_areaRingStart() { + var λ00, φ00, λ0, cosφ0, sinφ0; + d3_geo_area.point = function(λ, φ) { + d3_geo_area.point = nextPoint; + λ0 = (λ00 = λ) * d3_radians, cosφ0 = Math.cos(φ = (φ00 = φ) * d3_radians / 2 + π / 4), + sinφ0 = Math.sin(φ); + }; + function nextPoint(λ, φ) { + λ *= d3_radians; + φ = φ * d3_radians / 2 + π / 4; + var dλ = λ - λ0, sdλ = dλ >= 0 ? 1 : -1, adλ = sdλ * dλ, cosφ = Math.cos(φ), sinφ = Math.sin(φ), k = sinφ0 * sinφ, u = cosφ0 * cosφ + k * Math.cos(adλ), v = k * sdλ * Math.sin(adλ); + d3_geo_areaRingSum.add(Math.atan2(v, u)); + λ0 = λ, cosφ0 = cosφ, sinφ0 = sinφ; + } + d3_geo_area.lineEnd = function() { + nextPoint(λ00, φ00); + }; + } + function d3_geo_cartesian(spherical) { + var λ = spherical[0], φ = spherical[1], cosφ = Math.cos(φ); + return [ cosφ * Math.cos(λ), cosφ * Math.sin(λ), Math.sin(φ) ]; + } + function d3_geo_cartesianDot(a, b) { + return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]; + } + function d3_geo_cartesianCross(a, b) { + return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0] ]; + } + function d3_geo_cartesianAdd(a, b) { + a[0] += b[0]; + a[1] += b[1]; + a[2] += b[2]; + } + function d3_geo_cartesianScale(vector, k) { + return [ vector[0] * k, vector[1] * k, vector[2] * k ]; + } + function d3_geo_cartesianNormalize(d) { + var l = Math.sqrt(d[0] * d[0] + d[1] * d[1] + d[2] * d[2]); + d[0] /= l; + d[1] /= l; + d[2] /= l; + } + function d3_geo_spherical(cartesian) { + return [ Math.atan2(cartesian[1], cartesian[0]), d3_asin(cartesian[2]) ]; + } + function d3_geo_sphericalEqual(a, b) { + return abs(a[0] - b[0]) < ε && abs(a[1] - b[1]) < ε; + } + d3.geo.bounds = function() { + var λ0, φ0, λ1, φ1, λ_, λ__, φ__, p0, dλSum, ranges, range; + var bound = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: function() { + bound.point = ringPoint; + bound.lineStart = ringStart; + bound.lineEnd = ringEnd; + dλSum = 0; + d3_geo_area.polygonStart(); + }, + polygonEnd: function() { + d3_geo_area.polygonEnd(); + bound.point = point; + bound.lineStart = lineStart; + bound.lineEnd = lineEnd; + if (d3_geo_areaRingSum < 0) λ0 = -(λ1 = 180), φ0 = -(φ1 = 90); else if (dλSum > ε) φ1 = 90; else if (dλSum < -ε) φ0 = -90; + range[0] = λ0, range[1] = λ1; + } + }; + function point(λ, φ) { + ranges.push(range = [ λ0 = λ, λ1 = λ ]); + if (φ < φ0) 
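/* widen the latitude range to cover this point: */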
φ0 = φ; + if (φ > φ1) φ1 = φ; + } + function linePoint(λ, φ) { + var p = d3_geo_cartesian([ λ * d3_radians, φ * d3_radians ]); + if (p0) { + var normal = d3_geo_cartesianCross(p0, p), equatorial = [ normal[1], -normal[0], 0 ], inflection = d3_geo_cartesianCross(equatorial, normal); + d3_geo_cartesianNormalize(inflection); + inflection = d3_geo_spherical(inflection); + var dλ = λ - λ_, s = dλ > 0 ? 1 : -1, λi = inflection[0] * d3_degrees * s, antimeridian = abs(dλ) > 180; + if (antimeridian ^ (s * λ_ < λi && λi < s * λ)) { + var φi = inflection[1] * d3_degrees; + if (φi > φ1) φ1 = φi; + } else if (λi = (λi + 360) % 360 - 180, antimeridian ^ (s * λ_ < λi && λi < s * λ)) { + var φi = -inflection[1] * d3_degrees; + if (φi < φ0) φ0 = φi; + } else { + if (φ < φ0) φ0 = φ; + if (φ > φ1) φ1 = φ; + } + if (antimeridian) { + if (λ < λ_) { + if (angle(λ0, λ) > angle(λ0, λ1)) λ1 = λ; + } else { + if (angle(λ, λ1) > angle(λ0, λ1)) λ0 = λ; + } + } else { + if (λ1 >= λ0) { + if (λ < λ0) λ0 = λ; + if (λ > λ1) λ1 = λ; + } else { + if (λ > λ_) { + if (angle(λ0, λ) > angle(λ0, λ1)) λ1 = λ; + } else { + if (angle(λ, λ1) > angle(λ0, λ1)) λ0 = λ; + } + } + } + } else { + point(λ, φ); + } + p0 = p, λ_ = λ; + } + function lineStart() { + bound.point = linePoint; + } + function lineEnd() { + range[0] = λ0, range[1] = λ1; + bound.point = point; + p0 = null; + } + function ringPoint(λ, φ) { + if (p0) { + var dλ = λ - λ_; + dλSum += abs(dλ) > 180 ? dλ + (dλ > 0 ? 360 : -360) : dλ; + } else λ__ = λ, φ__ = φ; + d3_geo_area.point(λ, φ); + linePoint(λ, φ); + } + function ringStart() { + d3_geo_area.lineStart(); + } + function ringEnd() { + ringPoint(λ__, φ__); + d3_geo_area.lineEnd(); + if (abs(dλSum) > ε) λ0 = -(λ1 = 180); + range[0] = λ0, range[1] = λ1; + p0 = null; + } + function angle(λ0, λ1) { + return (λ1 -= λ0) < 0 ? λ1 + 360 : λ1; + } + function compareRanges(a, b) { + return a[0] - b[0]; + } + function withinRange(x, range) { + return range[0] <= range[1] ? range[0] <= x && x <= range[1] : x < range[0] || range[1] < x; + } + return function(feature) { + φ1 = λ1 = -(λ0 = φ0 = Infinity); + ranges = []; + d3.geo.stream(feature, bound); + var n = ranges.length; + if (n) { + ranges.sort(compareRanges); + for (var i = 1, a = ranges[0], b, merged = [ a ]; i < n; ++i) { + b = ranges[i]; + if (withinRange(b[0], a) || withinRange(b[1], a)) { + if (angle(a[0], b[1]) > angle(a[0], a[1])) a[1] = b[1]; + if (angle(b[0], a[1]) > angle(a[0], a[1])) a[0] = b[0]; + } else { + merged.push(a = b); + } + } + var best = -Infinity, dλ; + for (var n = merged.length - 1, i = 0, a = merged[n], b; i <= n; a = b, ++i) { + b = merged[i]; + if ((dλ = angle(a[1], b[0])) > best) best = dλ, λ0 = b[0], λ1 = a[1]; + } + } + ranges = range = null; + return λ0 === Infinity || φ0 === Infinity ? 
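/* if nothing was streamed the bounds remain at their sentinel Infinity values, so report NaN bounds: */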
[ [ NaN, NaN ], [ NaN, NaN ] ] : [ [ λ0, φ0 ], [ λ1, φ1 ] ]; + }; + }(); + d3.geo.centroid = function(object) { + d3_geo_centroidW0 = d3_geo_centroidW1 = d3_geo_centroidX0 = d3_geo_centroidY0 = d3_geo_centroidZ0 = d3_geo_centroidX1 = d3_geo_centroidY1 = d3_geo_centroidZ1 = d3_geo_centroidX2 = d3_geo_centroidY2 = d3_geo_centroidZ2 = 0; + d3.geo.stream(object, d3_geo_centroid); + var x = d3_geo_centroidX2, y = d3_geo_centroidY2, z = d3_geo_centroidZ2, m = x * x + y * y + z * z; + if (m < ε2) { + x = d3_geo_centroidX1, y = d3_geo_centroidY1, z = d3_geo_centroidZ1; + if (d3_geo_centroidW1 < ε) x = d3_geo_centroidX0, y = d3_geo_centroidY0, z = d3_geo_centroidZ0; + m = x * x + y * y + z * z; + if (m < ε2) return [ NaN, NaN ]; + } + return [ Math.atan2(y, x) * d3_degrees, d3_asin(z / Math.sqrt(m)) * d3_degrees ]; + }; + var d3_geo_centroidW0, d3_geo_centroidW1, d3_geo_centroidX0, d3_geo_centroidY0, d3_geo_centroidZ0, d3_geo_centroidX1, d3_geo_centroidY1, d3_geo_centroidZ1, d3_geo_centroidX2, d3_geo_centroidY2, d3_geo_centroidZ2; + var d3_geo_centroid = { + sphere: d3_noop, + point: d3_geo_centroidPoint, + lineStart: d3_geo_centroidLineStart, + lineEnd: d3_geo_centroidLineEnd, + polygonStart: function() { + d3_geo_centroid.lineStart = d3_geo_centroidRingStart; + }, + polygonEnd: function() { + d3_geo_centroid.lineStart = d3_geo_centroidLineStart; + } + }; + function d3_geo_centroidPoint(λ, φ) { + λ *= d3_radians; + var cosφ = Math.cos(φ *= d3_radians); + d3_geo_centroidPointXYZ(cosφ * Math.cos(λ), cosφ * Math.sin(λ), Math.sin(φ)); + } + function d3_geo_centroidPointXYZ(x, y, z) { + ++d3_geo_centroidW0; + d3_geo_centroidX0 += (x - d3_geo_centroidX0) / d3_geo_centroidW0; + d3_geo_centroidY0 += (y - d3_geo_centroidY0) / d3_geo_centroidW0; + d3_geo_centroidZ0 += (z - d3_geo_centroidZ0) / d3_geo_centroidW0; + } + function d3_geo_centroidLineStart() { + var x0, y0, z0; + d3_geo_centroid.point = function(λ, φ) { + λ *= d3_radians; + var cosφ = Math.cos(φ *= d3_radians); + x0 = cosφ * Math.cos(λ); + y0 = cosφ * Math.sin(λ); + z0 = Math.sin(φ); + d3_geo_centroid.point = nextPoint; + d3_geo_centroidPointXYZ(x0, y0, z0); + }; + function nextPoint(λ, φ) { + λ *= d3_radians; + var cosφ = Math.cos(φ *= d3_radians), x = cosφ * Math.cos(λ), y = cosφ * Math.sin(λ), z = Math.sin(φ), w = Math.atan2(Math.sqrt((w = y0 * z - z0 * y) * w + (w = z0 * x - x0 * z) * w + (w = x0 * y - y0 * x) * w), x0 * x + y0 * y + z0 * z); + d3_geo_centroidW1 += w; + d3_geo_centroidX1 += w * (x0 + (x0 = x)); + d3_geo_centroidY1 += w * (y0 + (y0 = y)); + d3_geo_centroidZ1 += w * (z0 + (z0 = z)); + d3_geo_centroidPointXYZ(x0, y0, z0); + } + } + function d3_geo_centroidLineEnd() { + d3_geo_centroid.point = d3_geo_centroidPoint; + } + function d3_geo_centroidRingStart() { + var λ00, φ00, x0, y0, z0; + d3_geo_centroid.point = function(λ, φ) { + λ00 = λ, φ00 = φ; + d3_geo_centroid.point = nextPoint; + λ *= d3_radians; + var cosφ = Math.cos(φ *= d3_radians); + x0 = cosφ * Math.cos(λ); + y0 = cosφ * Math.sin(λ); + z0 = Math.sin(φ); + d3_geo_centroidPointXYZ(x0, y0, z0); + }; + d3_geo_centroid.lineEnd = function() { + nextPoint(λ00, φ00); + d3_geo_centroid.lineEnd = d3_geo_centroidLineEnd; + d3_geo_centroid.point = d3_geo_centroidPoint; + }; + function nextPoint(λ, φ) { + λ *= d3_radians; + var cosφ = Math.cos(φ *= d3_radians), x = cosφ * Math.cos(λ), y = cosφ * Math.sin(λ), z = Math.sin(φ), cx = y0 * z - z0 * y, cy = z0 * x - x0 * z, cz = x0 * y - y0 * x, m = Math.sqrt(cx * cx + cy * cy + cz * cz), u = x0 * x + y0 * y + z0 * z, v = m && 
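/* m, the cross-product magnitude, is zero for coincident points; the && guard avoids dividing by zero: */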
-d3_acos(u) / m, w = Math.atan2(m, u); + d3_geo_centroidX2 += v * cx; + d3_geo_centroidY2 += v * cy; + d3_geo_centroidZ2 += v * cz; + d3_geo_centroidW1 += w; + d3_geo_centroidX1 += w * (x0 + (x0 = x)); + d3_geo_centroidY1 += w * (y0 + (y0 = y)); + d3_geo_centroidZ1 += w * (z0 + (z0 = z)); + d3_geo_centroidPointXYZ(x0, y0, z0); + } + } + function d3_geo_compose(a, b) { + function compose(x, y) { + return x = a(x, y), b(x[0], x[1]); + } + if (a.invert && b.invert) compose.invert = function(x, y) { + return x = b.invert(x, y), x && a.invert(x[0], x[1]); + }; + return compose; + } + function d3_true() { + return true; + } + function d3_geo_clipPolygon(segments, compare, clipStartInside, interpolate, listener) { + var subject = [], clip = []; + segments.forEach(function(segment) { + if ((n = segment.length - 1) <= 0) return; + var n, p0 = segment[0], p1 = segment[n]; + if (d3_geo_sphericalEqual(p0, p1)) { + listener.lineStart(); + for (var i = 0; i < n; ++i) listener.point((p0 = segment[i])[0], p0[1]); + listener.lineEnd(); + return; + } + var a = new d3_geo_clipPolygonIntersection(p0, segment, null, true), b = new d3_geo_clipPolygonIntersection(p0, null, a, false); + a.o = b; + subject.push(a); + clip.push(b); + a = new d3_geo_clipPolygonIntersection(p1, segment, null, false); + b = new d3_geo_clipPolygonIntersection(p1, null, a, true); + a.o = b; + subject.push(a); + clip.push(b); + }); + clip.sort(compare); + d3_geo_clipPolygonLinkCircular(subject); + d3_geo_clipPolygonLinkCircular(clip); + if (!subject.length) return; + for (var i = 0, entry = clipStartInside, n = clip.length; i < n; ++i) { + clip[i].e = entry = !entry; + } + var start = subject[0], points, point; + while (1) { + var current = start, isSubject = true; + while (current.v) if ((current = current.n) === start) return; + points = current.z; + listener.lineStart(); + do { + current.v = current.o.v = true; + if (current.e) { + if (isSubject) { + for (var i = 0, n = points.length; i < n; ++i) listener.point((point = points[i])[0], point[1]); + } else { + interpolate(current.x, current.n.x, 1, listener); + } + current = current.n; + } else { + if (isSubject) { + points = current.p.z; + for (var i = points.length - 1; i >= 0; --i) listener.point((point = points[i])[0], point[1]); + } else { + interpolate(current.x, current.p.x, -1, listener); + } + current = current.p; + } + current = current.o; + points = current.z; + isSubject = !isSubject; + } while (!current.v); + listener.lineEnd(); + } + } + function d3_geo_clipPolygonLinkCircular(array) { + if (!(n = array.length)) return; + var n, i = 0, a = array[0], b; + while (++i < n) { + a.n = b = array[i]; + b.p = a; + a = b; + } + a.n = b = array[0]; + b.p = a; + } + function d3_geo_clipPolygonIntersection(point, points, other, entry) { + this.x = point; + this.z = points; + this.o = other; + this.e = entry; + this.v = false; + this.n = this.p = null; + } + function d3_geo_clip(pointVisible, clipLine, interpolate, clipStart) { + return function(rotate, listener) { + var line = clipLine(listener), rotatedClipStart = rotate.invert(clipStart[0], clipStart[1]); + var clip = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: function() { + clip.point = pointRing; + clip.lineStart = ringStart; + clip.lineEnd = ringEnd; + segments = []; + polygon = []; + }, + polygonEnd: function() { + clip.point = point; + clip.lineStart = lineStart; + clip.lineEnd = lineEnd; + segments = d3.merge(segments); + var clipStartInside = d3_geo_pointInPolygon(rotatedClipStart, polygon); 
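+ /* rejoin any visible segments along the clip edge; if none survive but the clip start lies inside the polygon, the entire clip boundary becomes the result: */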
+ if (segments.length) { + if (!polygonStarted) listener.polygonStart(), polygonStarted = true; + d3_geo_clipPolygon(segments, d3_geo_clipSort, clipStartInside, interpolate, listener); + } else if (clipStartInside) { + if (!polygonStarted) listener.polygonStart(), polygonStarted = true; + listener.lineStart(); + interpolate(null, null, 1, listener); + listener.lineEnd(); + } + if (polygonStarted) listener.polygonEnd(), polygonStarted = false; + segments = polygon = null; + }, + sphere: function() { + listener.polygonStart(); + listener.lineStart(); + interpolate(null, null, 1, listener); + listener.lineEnd(); + listener.polygonEnd(); + } + }; + function point(λ, φ) { + var point = rotate(λ, φ); + if (pointVisible(λ = point[0], φ = point[1])) listener.point(λ, φ); + } + function pointLine(λ, φ) { + var point = rotate(λ, φ); + line.point(point[0], point[1]); + } + function lineStart() { + clip.point = pointLine; + line.lineStart(); + } + function lineEnd() { + clip.point = point; + line.lineEnd(); + } + var segments; + var buffer = d3_geo_clipBufferListener(), ringListener = clipLine(buffer), polygonStarted = false, polygon, ring; + function pointRing(λ, φ) { + ring.push([ λ, φ ]); + var point = rotate(λ, φ); + ringListener.point(point[0], point[1]); + } + function ringStart() { + ringListener.lineStart(); + ring = []; + } + function ringEnd() { + pointRing(ring[0][0], ring[0][1]); + ringListener.lineEnd(); + var clean = ringListener.clean(), ringSegments = buffer.buffer(), segment, n = ringSegments.length; + ring.pop(); + polygon.push(ring); + ring = null; + if (!n) return; + if (clean & 1) { + segment = ringSegments[0]; + var n = segment.length - 1, i = -1, point; + if (n > 0) { + if (!polygonStarted) listener.polygonStart(), polygonStarted = true; + listener.lineStart(); + while (++i < n) listener.point((point = segment[i])[0], point[1]); + listener.lineEnd(); + } + return; + } + if (n > 1 && clean & 2) ringSegments.push(ringSegments.pop().concat(ringSegments.shift())); + segments.push(ringSegments.filter(d3_geo_clipSegmentLength1)); + } + return clip; + }; + } + function d3_geo_clipSegmentLength1(segment) { + return segment.length > 1; + } + function d3_geo_clipBufferListener() { + var lines = [], line; + return { + lineStart: function() { + lines.push(line = []); + }, + point: function(λ, φ) { + line.push([ λ, φ ]); + }, + lineEnd: d3_noop, + buffer: function() { + var buffer = lines; + lines = []; + line = null; + return buffer; + }, + rejoin: function() { + if (lines.length > 1) lines.push(lines.pop().concat(lines.shift())); + } + }; + } + function d3_geo_clipSort(a, b) { + return ((a = a.x)[0] < 0 ? a[1] - halfπ - ε : halfπ - a[1]) - ((b = b.x)[0] < 0 ? b[1] - halfπ - ε : halfπ - b[1]); + } + var d3_geo_clipAntimeridian = d3_geo_clip(d3_true, d3_geo_clipAntimeridianLine, d3_geo_clipAntimeridianInterpolate, [ -π, -π / 2 ]); + function d3_geo_clipAntimeridianLine(listener) { + var λ0 = NaN, φ0 = NaN, sλ0 = NaN, clean; + return { + lineStart: function() { + listener.lineStart(); + clean = 1; + }, + point: function(λ1, φ1) { + var sλ1 = λ1 > 0 ? π : -π, dλ = abs(λ1 - λ0); + if (abs(dλ - π) < ε) { + listener.point(λ0, φ0 = (φ0 + φ1) / 2 > 0 ? 
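/* a longitude jump of exactly π means the segment runs through a pole; choose the pole matching the sign of the midpoint latitude: */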
halfπ : -halfπ); + listener.point(sλ0, φ0); + listener.lineEnd(); + listener.lineStart(); + listener.point(sλ1, φ0); + listener.point(λ1, φ0); + clean = 0; + } else if (sλ0 !== sλ1 && dλ >= π) { + if (abs(λ0 - sλ0) < ε) λ0 -= sλ0 * ε; + if (abs(λ1 - sλ1) < ε) λ1 -= sλ1 * ε; + φ0 = d3_geo_clipAntimeridianIntersect(λ0, φ0, λ1, φ1); + listener.point(sλ0, φ0); + listener.lineEnd(); + listener.lineStart(); + listener.point(sλ1, φ0); + clean = 0; + } + listener.point(λ0 = λ1, φ0 = φ1); + sλ0 = sλ1; + }, + lineEnd: function() { + listener.lineEnd(); + λ0 = φ0 = NaN; + }, + clean: function() { + return 2 - clean; + } + }; + } + function d3_geo_clipAntimeridianIntersect(λ0, φ0, λ1, φ1) { + var cosφ0, cosφ1, sinλ0_λ1 = Math.sin(λ0 - λ1); + return abs(sinλ0_λ1) > ε ? Math.atan((Math.sin(φ0) * (cosφ1 = Math.cos(φ1)) * Math.sin(λ1) - Math.sin(φ1) * (cosφ0 = Math.cos(φ0)) * Math.sin(λ0)) / (cosφ0 * cosφ1 * sinλ0_λ1)) : (φ0 + φ1) / 2; + } + function d3_geo_clipAntimeridianInterpolate(from, to, direction, listener) { + var φ; + if (from == null) { + φ = direction * halfπ; + listener.point(-π, φ); + listener.point(0, φ); + listener.point(π, φ); + listener.point(π, 0); + listener.point(π, -φ); + listener.point(0, -φ); + listener.point(-π, -φ); + listener.point(-π, 0); + listener.point(-π, φ); + } else if (abs(from[0] - to[0]) > ε) { + var s = from[0] < to[0] ? π : -π; + φ = direction * s / 2; + listener.point(-s, φ); + listener.point(0, φ); + listener.point(s, φ); + } else { + listener.point(to[0], to[1]); + } + } + function d3_geo_pointInPolygon(point, polygon) { + var meridian = point[0], parallel = point[1], meridianNormal = [ Math.sin(meridian), -Math.cos(meridian), 0 ], polarAngle = 0, winding = 0; + d3_geo_areaRingSum.reset(); + for (var i = 0, n = polygon.length; i < n; ++i) { + var ring = polygon[i], m = ring.length; + if (!m) continue; + var point0 = ring[0], λ0 = point0[0], φ0 = point0[1] / 2 + π / 4, sinφ0 = Math.sin(φ0), cosφ0 = Math.cos(φ0), j = 1; + while (true) { + if (j === m) j = 0; + point = ring[j]; + var λ = point[0], φ = point[1] / 2 + π / 4, sinφ = Math.sin(φ), cosφ = Math.cos(φ), dλ = λ - λ0, sdλ = dλ >= 0 ? 1 : -1, adλ = sdλ * dλ, antimeridian = adλ > π, k = sinφ0 * sinφ; + d3_geo_areaRingSum.add(Math.atan2(k * sdλ * Math.sin(adλ), cosφ0 * cosφ + k * Math.cos(adλ))); + polarAngle += antimeridian ? dλ + sdλ * τ : dλ; + if (antimeridian ^ λ0 >= meridian ^ λ >= meridian) { + var arc = d3_geo_cartesianCross(d3_geo_cartesian(point0), d3_geo_cartesian(point)); + d3_geo_cartesianNormalize(arc); + var intersection = d3_geo_cartesianCross(meridianNormal, arc); + d3_geo_cartesianNormalize(intersection); + var φarc = (antimeridian ^ dλ >= 0 ? -1 : 1) * d3_asin(intersection[2]); + if (parallel > φarc || parallel === φarc && (arc[0] || arc[1])) { + winding += antimeridian ^ dλ >= 0 ? 1 : -1; + } + } + if (!j++) break; + λ0 = λ, sinφ0 = sinφ, cosφ0 = cosφ, point0 = point; + } + } + return (polarAngle < -ε || polarAngle < ε && d3_geo_areaRingSum < 0) ^ winding & 1; + } + function d3_geo_clipCircle(radius) { + var cr = Math.cos(radius), smallRadius = cr > 0, notHemisphere = abs(cr) > ε, interpolate = d3_geo_circleInterpolate(radius, 6 * d3_radians); + return d3_geo_clip(visible, clipLine, interpolate, smallRadius ? 
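/* the final argument is the reference point that d3_geo_clip uses for its point-in-polygon test: */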
[ 0, -radius ] : [ -π, radius - π ]); + function visible(λ, φ) { + return Math.cos(λ) * Math.cos(φ) > cr; + } + function clipLine(listener) { + var point0, c0, v0, v00, clean; + return { + lineStart: function() { + v00 = v0 = false; + clean = 1; + }, + point: function(λ, φ) { + var point1 = [ λ, φ ], point2, v = visible(λ, φ), c = smallRadius ? v ? 0 : code(λ, φ) : v ? code(λ + (λ < 0 ? π : -π), φ) : 0; + if (!point0 && (v00 = v0 = v)) listener.lineStart(); + if (v !== v0) { + point2 = intersect(point0, point1); + if (d3_geo_sphericalEqual(point0, point2) || d3_geo_sphericalEqual(point1, point2)) { + point1[0] += ε; + point1[1] += ε; + v = visible(point1[0], point1[1]); + } + } + if (v !== v0) { + clean = 0; + if (v) { + listener.lineStart(); + point2 = intersect(point1, point0); + listener.point(point2[0], point2[1]); + } else { + point2 = intersect(point0, point1); + listener.point(point2[0], point2[1]); + listener.lineEnd(); + } + point0 = point2; + } else if (notHemisphere && point0 && smallRadius ^ v) { + var t; + if (!(c & c0) && (t = intersect(point1, point0, true))) { + clean = 0; + if (smallRadius) { + listener.lineStart(); + listener.point(t[0][0], t[0][1]); + listener.point(t[1][0], t[1][1]); + listener.lineEnd(); + } else { + listener.point(t[1][0], t[1][1]); + listener.lineEnd(); + listener.lineStart(); + listener.point(t[0][0], t[0][1]); + } + } + } + if (v && (!point0 || !d3_geo_sphericalEqual(point0, point1))) { + listener.point(point1[0], point1[1]); + } + point0 = point1, v0 = v, c0 = c; + }, + lineEnd: function() { + if (v0) listener.lineEnd(); + point0 = null; + }, + clean: function() { + return clean | (v00 && v0) << 1; + } + }; + } + function intersect(a, b, two) { + var pa = d3_geo_cartesian(a), pb = d3_geo_cartesian(b); + var n1 = [ 1, 0, 0 ], n2 = d3_geo_cartesianCross(pa, pb), n2n2 = d3_geo_cartesianDot(n2, n2), n1n2 = n2[0], determinant = n2n2 - n1n2 * n1n2; + if (!determinant) return !two && a; + var c1 = cr * n2n2 / determinant, c2 = -cr * n1n2 / determinant, n1xn2 = d3_geo_cartesianCross(n1, n2), A = d3_geo_cartesianScale(n1, c1), B = d3_geo_cartesianScale(n2, c2); + d3_geo_cartesianAdd(A, B); + var u = n1xn2, w = d3_geo_cartesianDot(A, u), uu = d3_geo_cartesianDot(u, u), t2 = w * w - uu * (d3_geo_cartesianDot(A, A) - 1); + if (t2 < 0) return; + var t = Math.sqrt(t2), q = d3_geo_cartesianScale(u, (-w - t) / uu); + d3_geo_cartesianAdd(q, A); + q = d3_geo_spherical(q); + if (!two) return q; + var λ0 = a[0], λ1 = b[0], φ0 = a[1], φ1 = b[1], z; + if (λ1 < λ0) z = λ0, λ0 = λ1, λ1 = z; + var δλ = λ1 - λ0, polar = abs(δλ - π) < ε, meridian = polar || δλ < ε; + if (!polar && φ1 < φ0) z = φ0, φ0 = φ1, φ1 = z; + if (meridian ? polar ? φ0 + φ1 > 0 ^ q[1] < (abs(q[0] - λ0) < ε ? φ0 : φ1) : φ0 <= q[1] && q[1] <= φ1 : δλ > π ^ (λ0 <= q[0] && q[0] <= λ1)) { + var q1 = d3_geo_cartesianScale(u, (-w + t) / uu); + d3_geo_cartesianAdd(q1, A); + return [ q, d3_geo_spherical(q1) ]; + } + } + function code(λ, φ) { + var r = smallRadius ? 
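/* a Cohen-Sutherland-style outcode: bits 1/2 mark λ outside ±r, bits 4/8 mark φ outside ±r: */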
radius : π - radius, code = 0; + if (λ < -r) code |= 1; else if (λ > r) code |= 2; + if (φ < -r) code |= 4; else if (φ > r) code |= 8; + return code; + } + } + function d3_geom_clipLine(x0, y0, x1, y1) { + return function(line) { + var a = line.a, b = line.b, ax = a.x, ay = a.y, bx = b.x, by = b.y, t0 = 0, t1 = 1, dx = bx - ax, dy = by - ay, r; + r = x0 - ax; + if (!dx && r > 0) return; + r /= dx; + if (dx < 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } else if (dx > 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } + r = x1 - ax; + if (!dx && r < 0) return; + r /= dx; + if (dx < 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } else if (dx > 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } + r = y0 - ay; + if (!dy && r > 0) return; + r /= dy; + if (dy < 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } else if (dy > 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } + r = y1 - ay; + if (!dy && r < 0) return; + r /= dy; + if (dy < 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } else if (dy > 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } + if (t0 > 0) line.a = { + x: ax + t0 * dx, + y: ay + t0 * dy + }; + if (t1 < 1) line.b = { + x: ax + t1 * dx, + y: ay + t1 * dy + }; + return line; + }; + } + var d3_geo_clipExtentMAX = 1e9; + d3.geo.clipExtent = function() { + var x0, y0, x1, y1, stream, clip, clipExtent = { + stream: function(output) { + if (stream) stream.valid = false; + stream = clip(output); + stream.valid = true; + return stream; + }, + extent: function(_) { + if (!arguments.length) return [ [ x0, y0 ], [ x1, y1 ] ]; + clip = d3_geo_clipExtent(x0 = +_[0][0], y0 = +_[0][1], x1 = +_[1][0], y1 = +_[1][1]); + if (stream) stream.valid = false, stream = null; + return clipExtent; + } + }; + return clipExtent.extent([ [ 0, 0 ], [ 960, 500 ] ]); + }; + function d3_geo_clipExtent(x0, y0, x1, y1) { + return function(listener) { + var listener_ = listener, bufferListener = d3_geo_clipBufferListener(), clipLine = d3_geom_clipLine(x0, y0, x1, y1), segments, polygon, ring; + var clip = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: function() { + listener = bufferListener; + segments = []; + polygon = []; + clean = true; + }, + polygonEnd: function() { + listener = listener_; + segments = d3.merge(segments); + var clipStartInside = insidePolygon([ x0, y1 ]), inside = clean && clipStartInside, visible = segments.length; + if (inside || visible) { + listener.polygonStart(); + if (inside) { + listener.lineStart(); + interpolate(null, null, 1, listener); + listener.lineEnd(); + } + if (visible) { + d3_geo_clipPolygon(segments, compare, clipStartInside, interpolate, listener); + } + listener.polygonEnd(); + } + segments = polygon = ring = null; + } + }; + function insidePolygon(p) { + var wn = 0, n = polygon.length, y = p[1]; + for (var i = 0; i < n; ++i) { + for (var j = 1, v = polygon[i], m = v.length, a = v[0], b; j < m; ++j) { + b = v[j]; + if (a[1] <= y) { + if (b[1] > y && d3_cross2d(a, b, p) > 0) ++wn; + } else { + if (b[1] <= y && d3_cross2d(a, b, p) < 0) --wn; + } + a = b; + } + } + return wn !== 0; + } + function interpolate(from, to, direction, listener) { + var a = 0, a1 = 0; + if (from == null || (a = corner(from, direction)) !== (a1 = corner(to, direction)) || comparePoints(from, to) < 0 ^ direction > 0) { + do { + listener.point(a === 0 || a === 3 ? x0 : x1, a > 1 ? 
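/* corner indices map to extent corners: 0 -> (x0,y0), 1 -> (x1,y0), 2 -> (x1,y1), 3 -> (x0,y1): */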
y1 : y0); + } while ((a = (a + direction + 4) % 4) !== a1); + } else { + listener.point(to[0], to[1]); + } + } + function pointVisible(x, y) { + return x0 <= x && x <= x1 && y0 <= y && y <= y1; + } + function point(x, y) { + if (pointVisible(x, y)) listener.point(x, y); + } + var x__, y__, v__, x_, y_, v_, first, clean; + function lineStart() { + clip.point = linePoint; + if (polygon) polygon.push(ring = []); + first = true; + v_ = false; + x_ = y_ = NaN; + } + function lineEnd() { + if (segments) { + linePoint(x__, y__); + if (v__ && v_) bufferListener.rejoin(); + segments.push(bufferListener.buffer()); + } + clip.point = point; + if (v_) listener.lineEnd(); + } + function linePoint(x, y) { + x = Math.max(-d3_geo_clipExtentMAX, Math.min(d3_geo_clipExtentMAX, x)); + y = Math.max(-d3_geo_clipExtentMAX, Math.min(d3_geo_clipExtentMAX, y)); + var v = pointVisible(x, y); + if (polygon) ring.push([ x, y ]); + if (first) { + x__ = x, y__ = y, v__ = v; + first = false; + if (v) { + listener.lineStart(); + listener.point(x, y); + } + } else { + if (v && v_) listener.point(x, y); else { + var l = { + a: { + x: x_, + y: y_ + }, + b: { + x: x, + y: y + } + }; + if (clipLine(l)) { + if (!v_) { + listener.lineStart(); + listener.point(l.a.x, l.a.y); + } + listener.point(l.b.x, l.b.y); + if (!v) listener.lineEnd(); + clean = false; + } else if (v) { + listener.lineStart(); + listener.point(x, y); + clean = false; + } + } + } + x_ = x, y_ = y, v_ = v; + } + return clip; + }; + function corner(p, direction) { + return abs(p[0] - x0) < ε ? direction > 0 ? 0 : 3 : abs(p[0] - x1) < ε ? direction > 0 ? 2 : 1 : abs(p[1] - y0) < ε ? direction > 0 ? 1 : 0 : direction > 0 ? 3 : 2; + } + function compare(a, b) { + return comparePoints(a.x, b.x); + } + function comparePoints(a, b) { + var ca = corner(a, 1), cb = corner(b, 1); + return ca !== cb ? ca - cb : ca === 0 ? b[1] - a[1] : ca === 1 ? a[0] - b[0] : ca === 2 ? 
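/* within the same edge, order intersections by their coordinate so traversal proceeds consistently around the extent: */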
a[1] - b[1] : b[0] - a[0]; + } + } + function d3_geo_conic(projectAt) { + var φ0 = 0, φ1 = π / 3, m = d3_geo_projectionMutator(projectAt), p = m(φ0, φ1); + p.parallels = function(_) { + if (!arguments.length) return [ φ0 / π * 180, φ1 / π * 180 ]; + return m(φ0 = _[0] * π / 180, φ1 = _[1] * π / 180); + }; + return p; + } + function d3_geo_conicEqualArea(φ0, φ1) { + var sinφ0 = Math.sin(φ0), n = (sinφ0 + Math.sin(φ1)) / 2, C = 1 + sinφ0 * (2 * n - sinφ0), ρ0 = Math.sqrt(C) / n; + function forward(λ, φ) { + var ρ = Math.sqrt(C - 2 * n * Math.sin(φ)) / n; + return [ ρ * Math.sin(λ *= n), ρ0 - ρ * Math.cos(λ) ]; + } + forward.invert = function(x, y) { + var ρ0_y = ρ0 - y; + return [ Math.atan2(x, ρ0_y) / n, d3_asin((C - (x * x + ρ0_y * ρ0_y) * n * n) / (2 * n)) ]; + }; + return forward; + } + (d3.geo.conicEqualArea = function() { + return d3_geo_conic(d3_geo_conicEqualArea); + }).raw = d3_geo_conicEqualArea; + d3.geo.albers = function() { + return d3.geo.conicEqualArea().rotate([ 96, 0 ]).center([ -.6, 38.7 ]).parallels([ 29.5, 45.5 ]).scale(1070); + }; + d3.geo.albersUsa = function() { + var lower48 = d3.geo.albers(); + var alaska = d3.geo.conicEqualArea().rotate([ 154, 0 ]).center([ -2, 58.5 ]).parallels([ 55, 65 ]); + var hawaii = d3.geo.conicEqualArea().rotate([ 157, 0 ]).center([ -3, 19.9 ]).parallels([ 8, 18 ]); + var point, pointStream = { + point: function(x, y) { + point = [ x, y ]; + } + }, lower48Point, alaskaPoint, hawaiiPoint; + function albersUsa(coordinates) { + var x = coordinates[0], y = coordinates[1]; + point = null; + (lower48Point(x, y), point) || (alaskaPoint(x, y), point) || hawaiiPoint(x, y); + return point; + } + albersUsa.invert = function(coordinates) { + var k = lower48.scale(), t = lower48.translate(), x = (coordinates[0] - t[0]) / k, y = (coordinates[1] - t[1]) / k; + return (y >= .12 && y < .234 && x >= -.425 && x < -.214 ? alaska : y >= .166 && y < .234 && x >= -.214 && x < -.115 ? 
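/* these normalized boxes mirror the inset clip extents set in albersUsa.translate below: */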
hawaii : lower48).invert(coordinates); + }; + albersUsa.stream = function(stream) { + var lower48Stream = lower48.stream(stream), alaskaStream = alaska.stream(stream), hawaiiStream = hawaii.stream(stream); + return { + point: function(x, y) { + lower48Stream.point(x, y); + alaskaStream.point(x, y); + hawaiiStream.point(x, y); + }, + sphere: function() { + lower48Stream.sphere(); + alaskaStream.sphere(); + hawaiiStream.sphere(); + }, + lineStart: function() { + lower48Stream.lineStart(); + alaskaStream.lineStart(); + hawaiiStream.lineStart(); + }, + lineEnd: function() { + lower48Stream.lineEnd(); + alaskaStream.lineEnd(); + hawaiiStream.lineEnd(); + }, + polygonStart: function() { + lower48Stream.polygonStart(); + alaskaStream.polygonStart(); + hawaiiStream.polygonStart(); + }, + polygonEnd: function() { + lower48Stream.polygonEnd(); + alaskaStream.polygonEnd(); + hawaiiStream.polygonEnd(); + } + }; + }; + albersUsa.precision = function(_) { + if (!arguments.length) return lower48.precision(); + lower48.precision(_); + alaska.precision(_); + hawaii.precision(_); + return albersUsa; + }; + albersUsa.scale = function(_) { + if (!arguments.length) return lower48.scale(); + lower48.scale(_); + alaska.scale(_ * .35); + hawaii.scale(_); + return albersUsa.translate(lower48.translate()); + }; + albersUsa.translate = function(_) { + if (!arguments.length) return lower48.translate(); + var k = lower48.scale(), x = +_[0], y = +_[1]; + lower48Point = lower48.translate(_).clipExtent([ [ x - .455 * k, y - .238 * k ], [ x + .455 * k, y + .238 * k ] ]).stream(pointStream).point; + alaskaPoint = alaska.translate([ x - .307 * k, y + .201 * k ]).clipExtent([ [ x - .425 * k + ε, y + .12 * k + ε ], [ x - .214 * k - ε, y + .234 * k - ε ] ]).stream(pointStream).point; + hawaiiPoint = hawaii.translate([ x - .205 * k, y + .212 * k ]).clipExtent([ [ x - .214 * k + ε, y + .166 * k + ε ], [ x - .115 * k - ε, y + .234 * k - ε ] ]).stream(pointStream).point; + return albersUsa; + }; + return albersUsa.scale(1070); + }; + var d3_geo_pathAreaSum, d3_geo_pathAreaPolygon, d3_geo_pathArea = { + point: d3_noop, + lineStart: d3_noop, + lineEnd: d3_noop, + polygonStart: function() { + d3_geo_pathAreaPolygon = 0; + d3_geo_pathArea.lineStart = d3_geo_pathAreaRingStart; + }, + polygonEnd: function() { + d3_geo_pathArea.lineStart = d3_geo_pathArea.lineEnd = d3_geo_pathArea.point = d3_noop; + d3_geo_pathAreaSum += abs(d3_geo_pathAreaPolygon / 2); + } + }; + function d3_geo_pathAreaRingStart() { + var x00, y00, x0, y0; + d3_geo_pathArea.point = function(x, y) { + d3_geo_pathArea.point = nextPoint; + x00 = x0 = x, y00 = y0 = y; + }; + function nextPoint(x, y) { + d3_geo_pathAreaPolygon += y0 * x - x0 * y; + x0 = x, y0 = y; + } + d3_geo_pathArea.lineEnd = function() { + nextPoint(x00, y00); + }; + } + var d3_geo_pathBoundsX0, d3_geo_pathBoundsY0, d3_geo_pathBoundsX1, d3_geo_pathBoundsY1; + var d3_geo_pathBounds = { + point: d3_geo_pathBoundsPoint, + lineStart: d3_noop, + lineEnd: d3_noop, + polygonStart: d3_noop, + polygonEnd: d3_noop + }; + function d3_geo_pathBoundsPoint(x, y) { + if (x < d3_geo_pathBoundsX0) d3_geo_pathBoundsX0 = x; + if (x > d3_geo_pathBoundsX1) d3_geo_pathBoundsX1 = x; + if (y < d3_geo_pathBoundsY0) d3_geo_pathBoundsY0 = y; + if (y > d3_geo_pathBoundsY1) d3_geo_pathBoundsY1 = y; + } + function d3_geo_pathBuffer() { + var pointCircle = d3_geo_pathBufferCircle(4.5), buffer = []; + var stream = { + point: point, + lineStart: function() { + stream.point = pointLineStart; + }, + lineEnd: lineEnd, + polygonStart: 
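/* inside a polygon, lineEnd emits "Z" to close each ring of the SVG path string: */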
function() { + stream.lineEnd = lineEndPolygon; + }, + polygonEnd: function() { + stream.lineEnd = lineEnd; + stream.point = point; + }, + pointRadius: function(_) { + pointCircle = d3_geo_pathBufferCircle(_); + return stream; + }, + result: function() { + if (buffer.length) { + var result = buffer.join(""); + buffer = []; + return result; + } + } + }; + function point(x, y) { + buffer.push("M", x, ",", y, pointCircle); + } + function pointLineStart(x, y) { + buffer.push("M", x, ",", y); + stream.point = pointLine; + } + function pointLine(x, y) { + buffer.push("L", x, ",", y); + } + function lineEnd() { + stream.point = point; + } + function lineEndPolygon() { + buffer.push("Z"); + } + return stream; + } + function d3_geo_pathBufferCircle(radius) { + return "m0," + radius + "a" + radius + "," + radius + " 0 1,1 0," + -2 * radius + "a" + radius + "," + radius + " 0 1,1 0," + 2 * radius + "z"; + } + var d3_geo_pathCentroid = { + point: d3_geo_pathCentroidPoint, + lineStart: d3_geo_pathCentroidLineStart, + lineEnd: d3_geo_pathCentroidLineEnd, + polygonStart: function() { + d3_geo_pathCentroid.lineStart = d3_geo_pathCentroidRingStart; + }, + polygonEnd: function() { + d3_geo_pathCentroid.point = d3_geo_pathCentroidPoint; + d3_geo_pathCentroid.lineStart = d3_geo_pathCentroidLineStart; + d3_geo_pathCentroid.lineEnd = d3_geo_pathCentroidLineEnd; + } + }; + function d3_geo_pathCentroidPoint(x, y) { + d3_geo_centroidX0 += x; + d3_geo_centroidY0 += y; + ++d3_geo_centroidZ0; + } + function d3_geo_pathCentroidLineStart() { + var x0, y0; + d3_geo_pathCentroid.point = function(x, y) { + d3_geo_pathCentroid.point = nextPoint; + d3_geo_pathCentroidPoint(x0 = x, y0 = y); + }; + function nextPoint(x, y) { + var dx = x - x0, dy = y - y0, z = Math.sqrt(dx * dx + dy * dy); + d3_geo_centroidX1 += z * (x0 + x) / 2; + d3_geo_centroidY1 += z * (y0 + y) / 2; + d3_geo_centroidZ1 += z; + d3_geo_pathCentroidPoint(x0 = x, y0 = y); + } + } + function d3_geo_pathCentroidLineEnd() { + d3_geo_pathCentroid.point = d3_geo_pathCentroidPoint; + } + function d3_geo_pathCentroidRingStart() { + var x00, y00, x0, y0; + d3_geo_pathCentroid.point = function(x, y) { + d3_geo_pathCentroid.point = nextPoint; + d3_geo_pathCentroidPoint(x00 = x0 = x, y00 = y0 = y); + }; + function nextPoint(x, y) { + var dx = x - x0, dy = y - y0, z = Math.sqrt(dx * dx + dy * dy); + d3_geo_centroidX1 += z * (x0 + x) / 2; + d3_geo_centroidY1 += z * (y0 + y) / 2; + d3_geo_centroidZ1 += z; + z = y0 * x - x0 * y; + d3_geo_centroidX2 += z * (x0 + x); + d3_geo_centroidY2 += z * (y0 + y); + d3_geo_centroidZ2 += z * 3; + d3_geo_pathCentroidPoint(x0 = x, y0 = y); + } + d3_geo_pathCentroid.lineEnd = function() { + nextPoint(x00, y00); + }; + } + function d3_geo_pathContext(context) { + var pointRadius = 4.5; + var stream = { + point: point, + lineStart: function() { + stream.point = pointLineStart; + }, + lineEnd: lineEnd, + polygonStart: function() { + stream.lineEnd = lineEndPolygon; + }, + polygonEnd: function() { + stream.lineEnd = lineEnd; + stream.point = point; + }, + pointRadius: function(_) { + pointRadius = _; + return stream; + }, + result: d3_noop + }; + function point(x, y) { + context.moveTo(x + pointRadius, y); + context.arc(x, y, pointRadius, 0, τ); + } + function pointLineStart(x, y) { + context.moveTo(x, y); + stream.point = pointLine; + } + function pointLine(x, y) { + context.lineTo(x, y); + } + function lineEnd() { + stream.point = point; + } + function lineEndPolygon() { + context.closePath(); + } + return stream; + } + function 
d3_geo_resample(project) { + var δ2 = .5, cosMinDistance = Math.cos(30 * d3_radians), maxDepth = 16; + function resample(stream) { + return (maxDepth ? resampleRecursive : resampleNone)(stream); + } + function resampleNone(stream) { + return d3_geo_transformPoint(stream, function(x, y) { + x = project(x, y); + stream.point(x[0], x[1]); + }); + } + function resampleRecursive(stream) { + var λ00, φ00, x00, y00, a00, b00, c00, λ0, x0, y0, a0, b0, c0; + var resample = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: function() { + stream.polygonStart(); + resample.lineStart = ringStart; + }, + polygonEnd: function() { + stream.polygonEnd(); + resample.lineStart = lineStart; + } + }; + function point(x, y) { + x = project(x, y); + stream.point(x[0], x[1]); + } + function lineStart() { + x0 = NaN; + resample.point = linePoint; + stream.lineStart(); + } + function linePoint(λ, φ) { + var c = d3_geo_cartesian([ λ, φ ]), p = project(λ, φ); + resampleLineTo(x0, y0, λ0, a0, b0, c0, x0 = p[0], y0 = p[1], λ0 = λ, a0 = c[0], b0 = c[1], c0 = c[2], maxDepth, stream); + stream.point(x0, y0); + } + function lineEnd() { + resample.point = point; + stream.lineEnd(); + } + function ringStart() { + lineStart(); + resample.point = ringPoint; + resample.lineEnd = ringEnd; + } + function ringPoint(λ, φ) { + linePoint(λ00 = λ, φ00 = φ), x00 = x0, y00 = y0, a00 = a0, b00 = b0, c00 = c0; + resample.point = linePoint; + } + function ringEnd() { + resampleLineTo(x0, y0, λ0, a0, b0, c0, x00, y00, λ00, a00, b00, c00, maxDepth, stream); + resample.lineEnd = lineEnd; + lineEnd(); + } + return resample; + } + function resampleLineTo(x0, y0, λ0, a0, b0, c0, x1, y1, λ1, a1, b1, c1, depth, stream) { + var dx = x1 - x0, dy = y1 - y0, d2 = dx * dx + dy * dy; + if (d2 > 4 * δ2 && depth--) { + var a = a0 + a1, b = b0 + b1, c = c0 + c1, m = Math.sqrt(a * a + b * b + c * c), φ2 = Math.asin(c /= m), λ2 = abs(abs(c) - 1) < ε || abs(λ0 - λ1) < ε ? (λ0 + λ1) / 2 : Math.atan2(b, a), p = project(λ2, φ2), x2 = p[0], y2 = p[1], dx2 = x2 - x0, dy2 = y2 - y0, dz = dy * dx2 - dx * dy2; + if (dz * dz / d2 > δ2 || abs((dx * dx2 + dy * dy2) / d2 - .5) > .3 || a0 * a1 + b0 * b1 + c0 * c1 < cosMinDistance) { + resampleLineTo(x0, y0, λ0, a0, b0, c0, x2, y2, λ2, a /= m, b /= m, c, depth, stream); + stream.point(x2, y2); + resampleLineTo(x2, y2, λ2, a, b, c, x1, y1, λ1, a1, b1, c1, depth, stream); + } + } + } + resample.precision = function(_) { + if (!arguments.length) return Math.sqrt(δ2); + maxDepth = (δ2 = _ * _) > 0 && 16; + return resample; + }; + return resample; + } + d3.geo.path = function() { + var pointRadius = 4.5, projection, context, projectStream, contextStream, cacheStream; + function path(object) { + if (object) { + if (typeof pointRadius === "function") contextStream.pointRadius(+pointRadius.apply(this, arguments)); + if (!cacheStream || !cacheStream.valid) cacheStream = projectStream(contextStream); + d3.geo.stream(object, cacheStream); + } + return contextStream.result(); + } + path.area = function(object) { + d3_geo_pathAreaSum = 0; + d3.geo.stream(object, projectStream(d3_geo_pathArea)); + return d3_geo_pathAreaSum; + }; + path.centroid = function(object) { + d3_geo_centroidX0 = d3_geo_centroidY0 = d3_geo_centroidZ0 = d3_geo_centroidX1 = d3_geo_centroidY1 = d3_geo_centroidZ1 = d3_geo_centroidX2 = d3_geo_centroidY2 = d3_geo_centroidZ2 = 0; + d3.geo.stream(object, projectStream(d3_geo_pathCentroid)); + return d3_geo_centroidZ2 ? 
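/* prefer the area-weighted centroid, then the length-weighted one, then the plain average of points: */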
[ d3_geo_centroidX2 / d3_geo_centroidZ2, d3_geo_centroidY2 / d3_geo_centroidZ2 ] : d3_geo_centroidZ1 ? [ d3_geo_centroidX1 / d3_geo_centroidZ1, d3_geo_centroidY1 / d3_geo_centroidZ1 ] : d3_geo_centroidZ0 ? [ d3_geo_centroidX0 / d3_geo_centroidZ0, d3_geo_centroidY0 / d3_geo_centroidZ0 ] : [ NaN, NaN ]; + }; + path.bounds = function(object) { + d3_geo_pathBoundsX1 = d3_geo_pathBoundsY1 = -(d3_geo_pathBoundsX0 = d3_geo_pathBoundsY0 = Infinity); + d3.geo.stream(object, projectStream(d3_geo_pathBounds)); + return [ [ d3_geo_pathBoundsX0, d3_geo_pathBoundsY0 ], [ d3_geo_pathBoundsX1, d3_geo_pathBoundsY1 ] ]; + }; + path.projection = function(_) { + if (!arguments.length) return projection; + projectStream = (projection = _) ? _.stream || d3_geo_pathProjectStream(_) : d3_identity; + return reset(); + }; + path.context = function(_) { + if (!arguments.length) return context; + contextStream = (context = _) == null ? new d3_geo_pathBuffer() : new d3_geo_pathContext(_); + if (typeof pointRadius !== "function") contextStream.pointRadius(pointRadius); + return reset(); + }; + path.pointRadius = function(_) { + if (!arguments.length) return pointRadius; + pointRadius = typeof _ === "function" ? _ : (contextStream.pointRadius(+_), +_); + return path; + }; + function reset() { + cacheStream = null; + return path; + } + return path.projection(d3.geo.albersUsa()).context(null); + }; + function d3_geo_pathProjectStream(project) { + var resample = d3_geo_resample(function(x, y) { + return project([ x * d3_degrees, y * d3_degrees ]); + }); + return function(stream) { + return d3_geo_projectionRadians(resample(stream)); + }; + } + d3.geo.transform = function(methods) { + return { + stream: function(stream) { + var transform = new d3_geo_transform(stream); + for (var k in methods) transform[k] = methods[k]; + return transform; + } + }; + }; + function d3_geo_transform(stream) { + this.stream = stream; + } + d3_geo_transform.prototype = { + point: function(x, y) { + this.stream.point(x, y); + }, + sphere: function() { + this.stream.sphere(); + }, + lineStart: function() { + this.stream.lineStart(); + }, + lineEnd: function() { + this.stream.lineEnd(); + }, + polygonStart: function() { + this.stream.polygonStart(); + }, + polygonEnd: function() { + this.stream.polygonEnd(); + } + }; + function d3_geo_transformPoint(stream, point) { + return { + point: point, + sphere: function() { + stream.sphere(); + }, + lineStart: function() { + stream.lineStart(); + }, + lineEnd: function() { + stream.lineEnd(); + }, + polygonStart: function() { + stream.polygonStart(); + }, + polygonEnd: function() { + stream.polygonEnd(); + } + }; + } + d3.geo.projection = d3_geo_projection; + d3.geo.projectionMutator = d3_geo_projectionMutator; + function d3_geo_projection(project) { + return d3_geo_projectionMutator(function() { + return project; + })(); + } + function d3_geo_projectionMutator(projectAt) { + var project, rotate, projectRotate, projectResample = d3_geo_resample(function(x, y) { + x = project(x, y); + return [ x[0] * k + δx, δy - x[1] * k ]; + }), k = 150, x = 480, y = 250, λ = 0, φ = 0, δλ = 0, δφ = 0, δγ = 0, δx, δy, preclip = d3_geo_clipAntimeridian, postclip = d3_identity, clipAngle = null, clipExtent = null, stream; + function projection(point) { + point = projectRotate(point[0] * d3_radians, point[1] * d3_radians); + return [ point[0] * k + δx, δy - point[1] * k ]; + } + function invert(point) { + point = projectRotate.invert((point[0] - δx) / k, (δy - point[1]) / k); + return point && [ point[0] * d3_degrees, 
point[1] * d3_degrees ]; + } + projection.stream = function(output) { + if (stream) stream.valid = false; + stream = d3_geo_projectionRadians(preclip(rotate, projectResample(postclip(output)))); + stream.valid = true; + return stream; + }; + projection.clipAngle = function(_) { + if (!arguments.length) return clipAngle; + preclip = _ == null ? (clipAngle = _, d3_geo_clipAntimeridian) : d3_geo_clipCircle((clipAngle = +_) * d3_radians); + return invalidate(); + }; + projection.clipExtent = function(_) { + if (!arguments.length) return clipExtent; + clipExtent = _; + postclip = _ ? d3_geo_clipExtent(_[0][0], _[0][1], _[1][0], _[1][1]) : d3_identity; + return invalidate(); + }; + projection.scale = function(_) { + if (!arguments.length) return k; + k = +_; + return reset(); + }; + projection.translate = function(_) { + if (!arguments.length) return [ x, y ]; + x = +_[0]; + y = +_[1]; + return reset(); + }; + projection.center = function(_) { + if (!arguments.length) return [ λ * d3_degrees, φ * d3_degrees ]; + λ = _[0] % 360 * d3_radians; + φ = _[1] % 360 * d3_radians; + return reset(); + }; + projection.rotate = function(_) { + if (!arguments.length) return [ δλ * d3_degrees, δφ * d3_degrees, δγ * d3_degrees ]; + δλ = _[0] % 360 * d3_radians; + δφ = _[1] % 360 * d3_radians; + δγ = _.length > 2 ? _[2] % 360 * d3_radians : 0; + return reset(); + }; + d3.rebind(projection, projectResample, "precision"); + function reset() { + projectRotate = d3_geo_compose(rotate = d3_geo_rotation(δλ, δφ, δγ), project); + var center = project(λ, φ); + δx = x - center[0] * k; + δy = y + center[1] * k; + return invalidate(); + } + function invalidate() { + if (stream) stream.valid = false, stream = null; + return projection; + } + return function() { + project = projectAt.apply(this, arguments); + projection.invert = project.invert && invert; + return reset(); + }; + } + function d3_geo_projectionRadians(stream) { + return d3_geo_transformPoint(stream, function(x, y) { + stream.point(x * d3_radians, y * d3_radians); + }); + } + function d3_geo_equirectangular(λ, φ) { + return [ λ, φ ]; + } + (d3.geo.equirectangular = function() { + return d3_geo_projection(d3_geo_equirectangular); + }).raw = d3_geo_equirectangular.invert = d3_geo_equirectangular; + d3.geo.rotation = function(rotate) { + rotate = d3_geo_rotation(rotate[0] % 360 * d3_radians, rotate[1] * d3_radians, rotate.length > 2 ? rotate[2] * d3_radians : 0); + function forward(coordinates) { + coordinates = rotate(coordinates[0] * d3_radians, coordinates[1] * d3_radians); + return coordinates[0] *= d3_degrees, coordinates[1] *= d3_degrees, coordinates; + } + forward.invert = function(coordinates) { + coordinates = rotate.invert(coordinates[0] * d3_radians, coordinates[1] * d3_radians); + return coordinates[0] *= d3_degrees, coordinates[1] *= d3_degrees, coordinates; + }; + return forward; + }; + function d3_geo_identityRotation(λ, φ) { + return [ λ > π ? λ - τ : λ < -π ? λ + τ : λ, φ ]; + } + d3_geo_identityRotation.invert = d3_geo_equirectangular; + function d3_geo_rotation(δλ, δφ, δγ) { + return δλ ? δφ || δγ ? d3_geo_compose(d3_geo_rotationλ(δλ), d3_geo_rotationφγ(δφ, δγ)) : d3_geo_rotationλ(δλ) : δφ || δγ ? d3_geo_rotationφγ(δφ, δγ) : d3_geo_identityRotation; + } + function d3_geo_forwardRotationλ(δλ) { + return function(λ, φ) { + return λ += δλ, [ λ > π ? λ - τ : λ < -π ? 
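/* wrap the shifted longitude back into [-π, π]: */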
λ + τ : λ, φ ]; + }; + } + function d3_geo_rotationλ(δλ) { + var rotation = d3_geo_forwardRotationλ(δλ); + rotation.invert = d3_geo_forwardRotationλ(-δλ); + return rotation; + } + function d3_geo_rotationφγ(δφ, δγ) { + var cosδφ = Math.cos(δφ), sinδφ = Math.sin(δφ), cosδγ = Math.cos(δγ), sinδγ = Math.sin(δγ); + function rotation(λ, φ) { + var cosφ = Math.cos(φ), x = Math.cos(λ) * cosφ, y = Math.sin(λ) * cosφ, z = Math.sin(φ), k = z * cosδφ + x * sinδφ; + return [ Math.atan2(y * cosδγ - k * sinδγ, x * cosδφ - z * sinδφ), d3_asin(k * cosδγ + y * sinδγ) ]; + } + rotation.invert = function(λ, φ) { + var cosφ = Math.cos(φ), x = Math.cos(λ) * cosφ, y = Math.sin(λ) * cosφ, z = Math.sin(φ), k = z * cosδγ - y * sinδγ; + return [ Math.atan2(y * cosδγ + z * sinδγ, x * cosδφ + k * sinδφ), d3_asin(k * cosδφ - x * sinδφ) ]; + }; + return rotation; + } + d3.geo.circle = function() { + var origin = [ 0, 0 ], angle, precision = 6, interpolate; + function circle() { + var center = typeof origin === "function" ? origin.apply(this, arguments) : origin, rotate = d3_geo_rotation(-center[0] * d3_radians, -center[1] * d3_radians, 0).invert, ring = []; + interpolate(null, null, 1, { + point: function(x, y) { + ring.push(x = rotate(x, y)); + x[0] *= d3_degrees, x[1] *= d3_degrees; + } + }); + return { + type: "Polygon", + coordinates: [ ring ] + }; + } + circle.origin = function(x) { + if (!arguments.length) return origin; + origin = x; + return circle; + }; + circle.angle = function(x) { + if (!arguments.length) return angle; + interpolate = d3_geo_circleInterpolate((angle = +x) * d3_radians, precision * d3_radians); + return circle; + }; + circle.precision = function(_) { + if (!arguments.length) return precision; + interpolate = d3_geo_circleInterpolate(angle * d3_radians, (precision = +_) * d3_radians); + return circle; + }; + return circle.angle(90); + }; + function d3_geo_circleInterpolate(radius, precision) { + var cr = Math.cos(radius), sr = Math.sin(radius); + return function(from, to, direction, listener) { + var step = direction * precision; + if (from != null) { + from = d3_geo_circleAngle(cr, from); + to = d3_geo_circleAngle(cr, to); + if (direction > 0 ? from < to : from > to) from += direction * τ; + } else { + from = radius + direction * τ; + to = radius - .5 * step; + } + for (var point, t = from; direction > 0 ? t > to : t < to; t -= step) { + listener.point((point = d3_geo_spherical([ cr, -sr * Math.cos(t), -sr * Math.sin(t) ]))[0], point[1]); + } + }; + } + function d3_geo_circleAngle(cr, point) { + var a = d3_geo_cartesian(point); + a[0] -= cr; + d3_geo_cartesianNormalize(a); + var angle = d3_acos(-a[1]); + return ((-a[2] < 0 ? 
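/* normalize the signed angle into [0, 2π): */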
-angle : angle) + 2 * Math.PI - ε) % (2 * Math.PI); + } + d3.geo.distance = function(a, b) { + var Δλ = (b[0] - a[0]) * d3_radians, φ0 = a[1] * d3_radians, φ1 = b[1] * d3_radians, sinΔλ = Math.sin(Δλ), cosΔλ = Math.cos(Δλ), sinφ0 = Math.sin(φ0), cosφ0 = Math.cos(φ0), sinφ1 = Math.sin(φ1), cosφ1 = Math.cos(φ1), t; + return Math.atan2(Math.sqrt((t = cosφ1 * sinΔλ) * t + (t = cosφ0 * sinφ1 - sinφ0 * cosφ1 * cosΔλ) * t), sinφ0 * sinφ1 + cosφ0 * cosφ1 * cosΔλ); + }; + d3.geo.graticule = function() { + var x1, x0, X1, X0, y1, y0, Y1, Y0, dx = 10, dy = dx, DX = 90, DY = 360, x, y, X, Y, precision = 2.5; + function graticule() { + return { + type: "MultiLineString", + coordinates: lines() + }; + } + function lines() { + return d3.range(Math.ceil(X0 / DX) * DX, X1, DX).map(X).concat(d3.range(Math.ceil(Y0 / DY) * DY, Y1, DY).map(Y)).concat(d3.range(Math.ceil(x0 / dx) * dx, x1, dx).filter(function(x) { + return abs(x % DX) > ε; + }).map(x)).concat(d3.range(Math.ceil(y0 / dy) * dy, y1, dy).filter(function(y) { + return abs(y % DY) > ε; + }).map(y)); + } + graticule.lines = function() { + return lines().map(function(coordinates) { + return { + type: "LineString", + coordinates: coordinates + }; + }); + }; + graticule.outline = function() { + return { + type: "Polygon", + coordinates: [ X(X0).concat(Y(Y1).slice(1), X(X1).reverse().slice(1), Y(Y0).reverse().slice(1)) ] + }; + }; + graticule.extent = function(_) { + if (!arguments.length) return graticule.minorExtent(); + return graticule.majorExtent(_).minorExtent(_); + }; + graticule.majorExtent = function(_) { + if (!arguments.length) return [ [ X0, Y0 ], [ X1, Y1 ] ]; + X0 = +_[0][0], X1 = +_[1][0]; + Y0 = +_[0][1], Y1 = +_[1][1]; + if (X0 > X1) _ = X0, X0 = X1, X1 = _; + if (Y0 > Y1) _ = Y0, Y0 = Y1, Y1 = _; + return graticule.precision(precision); + }; + graticule.minorExtent = function(_) { + if (!arguments.length) return [ [ x0, y0 ], [ x1, y1 ] ]; + x0 = +_[0][0], x1 = +_[1][0]; + y0 = +_[0][1], y1 = +_[1][1]; + if (x0 > x1) _ = x0, x0 = x1, x1 = _; + if (y0 > y1) _ = y0, y0 = y1, y1 = _; + return graticule.precision(precision); + }; + graticule.step = function(_) { + if (!arguments.length) return graticule.minorStep(); + return graticule.majorStep(_).minorStep(_); + }; + graticule.majorStep = function(_) { + if (!arguments.length) return [ DX, DY ]; + DX = +_[0], DY = +_[1]; + return graticule; + }; + graticule.minorStep = function(_) { + if (!arguments.length) return [ dx, dy ]; + dx = +_[0], dy = +_[1]; + return graticule; + }; + graticule.precision = function(_) { + if (!arguments.length) return precision; + precision = +_; + x = d3_geo_graticuleX(y0, y1, 90); + y = d3_geo_graticuleY(x0, x1, precision); + X = d3_geo_graticuleX(Y0, Y1, 90); + Y = d3_geo_graticuleY(X0, X1, precision); + return graticule; + }; + return graticule.majorExtent([ [ -180, -90 + ε ], [ 180, 90 - ε ] ]).minorExtent([ [ -180, -80 - ε ], [ 180, 80 + ε ] ]); + }; + function d3_geo_graticuleX(y0, y1, dy) { + var y = d3.range(y0, y1 - ε, dy).concat(y1); + return function(x) { + return y.map(function(y) { + return [ x, y ]; + }); + }; + } + function d3_geo_graticuleY(x0, x1, dx) { + var x = d3.range(x0, x1 - ε, dx).concat(x1); + return function(y) { + return x.map(function(x) { + return [ x, y ]; + }); + }; + } + function d3_source(d) { + return d.source; + } + function d3_target(d) { + return d.target; + } + d3.geo.greatArc = function() { + var source = d3_source, source_, target = d3_target, target_; + function greatArc() { + return { + type: "LineString", + coordinates: [ 
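/* fixed endpoints (source_/target_) take precedence over accessor functions when set: */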
source_ || source.apply(this, arguments), target_ || target.apply(this, arguments) ] + }; + } + greatArc.distance = function() { + return d3.geo.distance(source_ || source.apply(this, arguments), target_ || target.apply(this, arguments)); + }; + greatArc.source = function(_) { + if (!arguments.length) return source; + source = _, source_ = typeof _ === "function" ? null : _; + return greatArc; + }; + greatArc.target = function(_) { + if (!arguments.length) return target; + target = _, target_ = typeof _ === "function" ? null : _; + return greatArc; + }; + greatArc.precision = function() { + return arguments.length ? greatArc : 0; + }; + return greatArc; + }; + d3.geo.interpolate = function(source, target) { + return d3_geo_interpolate(source[0] * d3_radians, source[1] * d3_radians, target[0] * d3_radians, target[1] * d3_radians); + }; + function d3_geo_interpolate(x0, y0, x1, y1) { + var cy0 = Math.cos(y0), sy0 = Math.sin(y0), cy1 = Math.cos(y1), sy1 = Math.sin(y1), kx0 = cy0 * Math.cos(x0), ky0 = cy0 * Math.sin(x0), kx1 = cy1 * Math.cos(x1), ky1 = cy1 * Math.sin(x1), d = 2 * Math.asin(Math.sqrt(d3_haversin(y1 - y0) + cy0 * cy1 * d3_haversin(x1 - x0))), k = 1 / Math.sin(d); + var interpolate = d ? function(t) { + var B = Math.sin(t *= d) * k, A = Math.sin(d - t) * k, x = A * kx0 + B * kx1, y = A * ky0 + B * ky1, z = A * sy0 + B * sy1; + return [ Math.atan2(y, x) * d3_degrees, Math.atan2(z, Math.sqrt(x * x + y * y)) * d3_degrees ]; + } : function() { + return [ x0 * d3_degrees, y0 * d3_degrees ]; + }; + interpolate.distance = d; + return interpolate; + } + d3.geo.length = function(object) { + d3_geo_lengthSum = 0; + d3.geo.stream(object, d3_geo_length); + return d3_geo_lengthSum; + }; + var d3_geo_lengthSum; + var d3_geo_length = { + sphere: d3_noop, + point: d3_noop, + lineStart: d3_geo_lengthLineStart, + lineEnd: d3_noop, + polygonStart: d3_noop, + polygonEnd: d3_noop + }; + function d3_geo_lengthLineStart() { + var λ0, sinφ0, cosφ0; + d3_geo_length.point = function(λ, φ) { + λ0 = λ * d3_radians, sinφ0 = Math.sin(φ *= d3_radians), cosφ0 = Math.cos(φ); + d3_geo_length.point = nextPoint; + }; + d3_geo_length.lineEnd = function() { + d3_geo_length.point = d3_geo_length.lineEnd = d3_noop; + }; + function nextPoint(λ, φ) { + var sinφ = Math.sin(φ *= d3_radians), cosφ = Math.cos(φ), t = abs((λ *= d3_radians) - λ0), cosΔλ = Math.cos(t); + d3_geo_lengthSum += Math.atan2(Math.sqrt((t = cosφ * Math.sin(t)) * t + (t = cosφ0 * sinφ - sinφ0 * cosφ * cosΔλ) * t), sinφ0 * sinφ + cosφ0 * cosφ * cosΔλ); + λ0 = λ, sinφ0 = sinφ, cosφ0 = cosφ; + } + } + function d3_geo_azimuthal(scale, angle) { + function azimuthal(λ, φ) { + var cosλ = Math.cos(λ), cosφ = Math.cos(φ), k = scale(cosλ * cosφ); + return [ k * cosφ * Math.sin(λ), k * Math.sin(φ) ]; + } + azimuthal.invert = function(x, y) { + var ρ = Math.sqrt(x * x + y * y), c = angle(ρ), sinc = Math.sin(c), cosc = Math.cos(c); + return [ Math.atan2(x * sinc, ρ * cosc), Math.asin(ρ && y * sinc / ρ) ]; + }; + return azimuthal; + } + var d3_geo_azimuthalEqualArea = d3_geo_azimuthal(function(cosλcosφ) { + return Math.sqrt(2 / (1 + cosλcosφ)); + }, function(ρ) { + return 2 * Math.asin(ρ / 2); + }); + (d3.geo.azimuthalEqualArea = function() { + return d3_geo_projection(d3_geo_azimuthalEqualArea); + }).raw = d3_geo_azimuthalEqualArea; + var d3_geo_azimuthalEquidistant = d3_geo_azimuthal(function(cosλcosφ) { + var c = Math.acos(cosλcosφ); + return c && c / Math.sin(c); + }, d3_identity); + (d3.geo.azimuthalEquidistant = function() { + return 
d3_geo_projection(d3_geo_azimuthalEquidistant); + }).raw = d3_geo_azimuthalEquidistant; + function d3_geo_conicConformal(φ0, φ1) { + var cosφ0 = Math.cos(φ0), t = function(φ) { + return Math.tan(π / 4 + φ / 2); + }, n = φ0 === φ1 ? Math.sin(φ0) : Math.log(cosφ0 / Math.cos(φ1)) / Math.log(t(φ1) / t(φ0)), F = cosφ0 * Math.pow(t(φ0), n) / n; + if (!n) return d3_geo_mercator; + function forward(λ, φ) { + if (F > 0) { + if (φ < -halfπ + ε) φ = -halfπ + ε; + } else { + if (φ > halfπ - ε) φ = halfπ - ε; + } + var ρ = F / Math.pow(t(φ), n); + return [ ρ * Math.sin(n * λ), F - ρ * Math.cos(n * λ) ]; + } + forward.invert = function(x, y) { + var ρ0_y = F - y, ρ = d3_sgn(n) * Math.sqrt(x * x + ρ0_y * ρ0_y); + return [ Math.atan2(x, ρ0_y) / n, 2 * Math.atan(Math.pow(F / ρ, 1 / n)) - halfπ ]; + }; + return forward; + } + (d3.geo.conicConformal = function() { + return d3_geo_conic(d3_geo_conicConformal); + }).raw = d3_geo_conicConformal; + function d3_geo_conicEquidistant(φ0, φ1) { + var cosφ0 = Math.cos(φ0), n = φ0 === φ1 ? Math.sin(φ0) : (cosφ0 - Math.cos(φ1)) / (φ1 - φ0), G = cosφ0 / n + φ0; + if (abs(n) < ε) return d3_geo_equirectangular; + function forward(λ, φ) { + var ρ = G - φ; + return [ ρ * Math.sin(n * λ), G - ρ * Math.cos(n * λ) ]; + } + forward.invert = function(x, y) { + var ρ0_y = G - y; + return [ Math.atan2(x, ρ0_y) / n, G - d3_sgn(n) * Math.sqrt(x * x + ρ0_y * ρ0_y) ]; + }; + return forward; + } + (d3.geo.conicEquidistant = function() { + return d3_geo_conic(d3_geo_conicEquidistant); + }).raw = d3_geo_conicEquidistant; + var d3_geo_gnomonic = d3_geo_azimuthal(function(cosλcosφ) { + return 1 / cosλcosφ; + }, Math.atan); + (d3.geo.gnomonic = function() { + return d3_geo_projection(d3_geo_gnomonic); + }).raw = d3_geo_gnomonic; + function d3_geo_mercator(λ, φ) { + return [ λ, Math.log(Math.tan(π / 4 + φ / 2)) ]; + } + d3_geo_mercator.invert = function(x, y) { + return [ x, 2 * Math.atan(Math.exp(y)) - halfπ ]; + }; + function d3_geo_mercatorProjection(project) { + var m = d3_geo_projection(project), scale = m.scale, translate = m.translate, clipExtent = m.clipExtent, clipAuto; + m.scale = function() { + var v = scale.apply(m, arguments); + return v === m ? clipAuto ? m.clipExtent(null) : m : v; + }; + m.translate = function() { + var v = translate.apply(m, arguments); + return v === m ? clipAuto ? 
m.clipExtent(null) : m : v; + }; + m.clipExtent = function(_) { + var v = clipExtent.apply(m, arguments); + if (v === m) { + if (clipAuto = _ == null) { + var k = π * scale(), t = translate(); + clipExtent([ [ t[0] - k, t[1] - k ], [ t[0] + k, t[1] + k ] ]); + } + } else if (clipAuto) { + v = null; + } + return v; + }; + return m.clipExtent(null); + } + (d3.geo.mercator = function() { + return d3_geo_mercatorProjection(d3_geo_mercator); + }).raw = d3_geo_mercator; + var d3_geo_orthographic = d3_geo_azimuthal(function() { + return 1; + }, Math.asin); + (d3.geo.orthographic = function() { + return d3_geo_projection(d3_geo_orthographic); + }).raw = d3_geo_orthographic; + var d3_geo_stereographic = d3_geo_azimuthal(function(cosλcosφ) { + return 1 / (1 + cosλcosφ); + }, function(ρ) { + return 2 * Math.atan(ρ); + }); + (d3.geo.stereographic = function() { + return d3_geo_projection(d3_geo_stereographic); + }).raw = d3_geo_stereographic; + function d3_geo_transverseMercator(λ, φ) { + return [ Math.log(Math.tan(π / 4 + φ / 2)), -λ ]; + } + d3_geo_transverseMercator.invert = function(x, y) { + return [ -y, 2 * Math.atan(Math.exp(x)) - halfπ ]; + }; + (d3.geo.transverseMercator = function() { + var projection = d3_geo_mercatorProjection(d3_geo_transverseMercator), center = projection.center, rotate = projection.rotate; + projection.center = function(_) { + return _ ? center([ -_[1], _[0] ]) : (_ = center(), [ _[1], -_[0] ]); + }; + projection.rotate = function(_) { + return _ ? rotate([ _[0], _[1], _.length > 2 ? _[2] + 90 : 90 ]) : (_ = rotate(), + [ _[0], _[1], _[2] - 90 ]); + }; + return rotate([ 0, 0, 90 ]); + }).raw = d3_geo_transverseMercator; + d3.geom = {}; + function d3_geom_pointX(d) { + return d[0]; + } + function d3_geom_pointY(d) { + return d[1]; + } + d3.geom.hull = function(vertices) { + var x = d3_geom_pointX, y = d3_geom_pointY; + if (arguments.length) return hull(vertices); + function hull(data) { + if (data.length < 3) return []; + var fx = d3_functor(x), fy = d3_functor(y), i, n = data.length, points = [], flippedPoints = []; + for (i = 0; i < n; i++) { + points.push([ +fx.call(this, data[i], i), +fy.call(this, data[i], i), i ]); + } + points.sort(d3_geom_hullOrder); + for (i = 0; i < n; i++) flippedPoints.push([ points[i][0], -points[i][1] ]); + var upper = d3_geom_hullUpper(points), lower = d3_geom_hullUpper(flippedPoints); + var skipLeft = lower[0] === upper[0], skipRight = lower[lower.length - 1] === upper[upper.length - 1], polygon = []; + for (i = upper.length - 1; i >= 0; --i) polygon.push(data[points[upper[i]][2]]); + for (i = +skipLeft; i < lower.length - skipRight; ++i) polygon.push(data[points[lower[i]][2]]); + return polygon; + } + hull.x = function(_) { + return arguments.length ? (x = _, hull) : x; + }; + hull.y = function(_) { + return arguments.length ? 
(y = _, hull) : y; + }; + return hull; + }; + function d3_geom_hullUpper(points) { + var n = points.length, hull = [ 0, 1 ], hs = 2; + for (var i = 2; i < n; i++) { + while (hs > 1 && d3_cross2d(points[hull[hs - 2]], points[hull[hs - 1]], points[i]) <= 0) --hs; + hull[hs++] = i; + } + return hull.slice(0, hs); + } + function d3_geom_hullOrder(a, b) { + return a[0] - b[0] || a[1] - b[1]; + } + d3.geom.polygon = function(coordinates) { + d3_subclass(coordinates, d3_geom_polygonPrototype); + return coordinates; + }; + var d3_geom_polygonPrototype = d3.geom.polygon.prototype = []; + d3_geom_polygonPrototype.area = function() { + var i = -1, n = this.length, a, b = this[n - 1], area = 0; + while (++i < n) { + a = b; + b = this[i]; + area += a[1] * b[0] - a[0] * b[1]; + } + return area * .5; + }; + d3_geom_polygonPrototype.centroid = function(k) { + var i = -1, n = this.length, x = 0, y = 0, a, b = this[n - 1], c; + if (!arguments.length) k = -1 / (6 * this.area()); + while (++i < n) { + a = b; + b = this[i]; + c = a[0] * b[1] - b[0] * a[1]; + x += (a[0] + b[0]) * c; + y += (a[1] + b[1]) * c; + } + return [ x * k, y * k ]; + }; + d3_geom_polygonPrototype.clip = function(subject) { + var input, closed = d3_geom_polygonClosed(subject), i = -1, n = this.length - d3_geom_polygonClosed(this), j, m, a = this[n - 1], b, c, d; + while (++i < n) { + input = subject.slice(); + subject.length = 0; + b = this[i]; + c = input[(m = input.length - closed) - 1]; + j = -1; + while (++j < m) { + d = input[j]; + if (d3_geom_polygonInside(d, a, b)) { + if (!d3_geom_polygonInside(c, a, b)) { + subject.push(d3_geom_polygonIntersect(c, d, a, b)); + } + subject.push(d); + } else if (d3_geom_polygonInside(c, a, b)) { + subject.push(d3_geom_polygonIntersect(c, d, a, b)); + } + c = d; + } + if (closed) subject.push(subject[0]); + a = b; + } + return subject; + }; + function d3_geom_polygonInside(p, a, b) { + return (b[0] - a[0]) * (p[1] - a[1]) < (b[1] - a[1]) * (p[0] - a[0]); + } + function d3_geom_polygonIntersect(c, d, a, b) { + var x1 = c[0], x3 = a[0], x21 = d[0] - x1, x43 = b[0] - x3, y1 = c[1], y3 = a[1], y21 = d[1] - y1, y43 = b[1] - y3, ua = (x43 * (y1 - y3) - y43 * (x1 - x3)) / (y43 * x21 - x43 * y21); + return [ x1 + ua * x21, y1 + ua * y21 ]; + } + function d3_geom_polygonClosed(coordinates) { + var a = coordinates[0], b = coordinates[coordinates.length - 1]; + return !(a[0] - b[0] || a[1] - b[1]); + } + var d3_geom_voronoiEdges, d3_geom_voronoiCells, d3_geom_voronoiBeaches, d3_geom_voronoiBeachPool = [], d3_geom_voronoiFirstCircle, d3_geom_voronoiCircles, d3_geom_voronoiCirclePool = []; + function d3_geom_voronoiBeach() { + d3_geom_voronoiRedBlackNode(this); + this.edge = this.site = this.circle = null; + } + function d3_geom_voronoiCreateBeach(site) { + var beach = d3_geom_voronoiBeachPool.pop() || new d3_geom_voronoiBeach(); + beach.site = site; + return beach; + } + function d3_geom_voronoiDetachBeach(beach) { + d3_geom_voronoiDetachCircle(beach); + d3_geom_voronoiBeaches.remove(beach); + d3_geom_voronoiBeachPool.push(beach); + d3_geom_voronoiRedBlackNode(beach); + } + function d3_geom_voronoiRemoveBeach(beach) { + var circle = beach.circle, x = circle.x, y = circle.cy, vertex = { + x: x, + y: y + }, previous = beach.P, next = beach.N, disappearing = [ beach ]; + d3_geom_voronoiDetachBeach(beach); + var lArc = previous; + while (lArc.circle && abs(x - lArc.circle.x) < ε && abs(y - lArc.circle.cy) < ε) { + previous = lArc.P; + disappearing.unshift(lArc); + d3_geom_voronoiDetachBeach(lArc); + lArc = 
previous; + } + disappearing.unshift(lArc); + d3_geom_voronoiDetachCircle(lArc); + var rArc = next; + while (rArc.circle && abs(x - rArc.circle.x) < ε && abs(y - rArc.circle.cy) < ε) { + next = rArc.N; + disappearing.push(rArc); + d3_geom_voronoiDetachBeach(rArc); + rArc = next; + } + disappearing.push(rArc); + d3_geom_voronoiDetachCircle(rArc); + var nArcs = disappearing.length, iArc; + for (iArc = 1; iArc < nArcs; ++iArc) { + rArc = disappearing[iArc]; + lArc = disappearing[iArc - 1]; + d3_geom_voronoiSetEdgeEnd(rArc.edge, lArc.site, rArc.site, vertex); + } + lArc = disappearing[0]; + rArc = disappearing[nArcs - 1]; + rArc.edge = d3_geom_voronoiCreateEdge(lArc.site, rArc.site, null, vertex); + d3_geom_voronoiAttachCircle(lArc); + d3_geom_voronoiAttachCircle(rArc); + } + function d3_geom_voronoiAddBeach(site) { + var x = site.x, directrix = site.y, lArc, rArc, dxl, dxr, node = d3_geom_voronoiBeaches._; + while (node) { + dxl = d3_geom_voronoiLeftBreakPoint(node, directrix) - x; + if (dxl > ε) node = node.L; else { + dxr = x - d3_geom_voronoiRightBreakPoint(node, directrix); + if (dxr > ε) { + if (!node.R) { + lArc = node; + break; + } + node = node.R; + } else { + if (dxl > -ε) { + lArc = node.P; + rArc = node; + } else if (dxr > -ε) { + lArc = node; + rArc = node.N; + } else { + lArc = rArc = node; + } + break; + } + } + } + var newArc = d3_geom_voronoiCreateBeach(site); + d3_geom_voronoiBeaches.insert(lArc, newArc); + if (!lArc && !rArc) return; + if (lArc === rArc) { + d3_geom_voronoiDetachCircle(lArc); + rArc = d3_geom_voronoiCreateBeach(lArc.site); + d3_geom_voronoiBeaches.insert(newArc, rArc); + newArc.edge = rArc.edge = d3_geom_voronoiCreateEdge(lArc.site, newArc.site); + d3_geom_voronoiAttachCircle(lArc); + d3_geom_voronoiAttachCircle(rArc); + return; + } + if (!rArc) { + newArc.edge = d3_geom_voronoiCreateEdge(lArc.site, newArc.site); + return; + } + d3_geom_voronoiDetachCircle(lArc); + d3_geom_voronoiDetachCircle(rArc); + var lSite = lArc.site, ax = lSite.x, ay = lSite.y, bx = site.x - ax, by = site.y - ay, rSite = rArc.site, cx = rSite.x - ax, cy = rSite.y - ay, d = 2 * (bx * cy - by * cx), hb = bx * bx + by * by, hc = cx * cx + cy * cy, vertex = { + x: (cy * hb - by * hc) / d + ax, + y: (bx * hc - cx * hb) / d + ay + }; + d3_geom_voronoiSetEdgeEnd(rArc.edge, lSite, rSite, vertex); + newArc.edge = d3_geom_voronoiCreateEdge(lSite, site, null, vertex); + rArc.edge = d3_geom_voronoiCreateEdge(site, rSite, null, vertex); + d3_geom_voronoiAttachCircle(lArc); + d3_geom_voronoiAttachCircle(rArc); + } + function d3_geom_voronoiLeftBreakPoint(arc, directrix) { + var site = arc.site, rfocx = site.x, rfocy = site.y, pby2 = rfocy - directrix; + if (!pby2) return rfocx; + var lArc = arc.P; + if (!lArc) return -Infinity; + site = lArc.site; + var lfocx = site.x, lfocy = site.y, plby2 = lfocy - directrix; + if (!plby2) return lfocx; + var hl = lfocx - rfocx, aby2 = 1 / pby2 - 1 / plby2, b = hl / plby2; + if (aby2) return (-b + Math.sqrt(b * b - 2 * aby2 * (hl * hl / (-2 * plby2) - lfocy + plby2 / 2 + rfocy - pby2 / 2))) / aby2 + rfocx; + return (rfocx + lfocx) / 2; + } + function d3_geom_voronoiRightBreakPoint(arc, directrix) { + var rArc = arc.N; + if (rArc) return d3_geom_voronoiLeftBreakPoint(rArc, directrix); + var site = arc.site; + return site.y === directrix ? 
site.x : Infinity; + } + function d3_geom_voronoiCell(site) { + this.site = site; + this.edges = []; + } + d3_geom_voronoiCell.prototype.prepare = function() { + var halfEdges = this.edges, iHalfEdge = halfEdges.length, edge; + while (iHalfEdge--) { + edge = halfEdges[iHalfEdge].edge; + if (!edge.b || !edge.a) halfEdges.splice(iHalfEdge, 1); + } + halfEdges.sort(d3_geom_voronoiHalfEdgeOrder); + return halfEdges.length; + }; + function d3_geom_voronoiCloseCells(extent) { + var x0 = extent[0][0], x1 = extent[1][0], y0 = extent[0][1], y1 = extent[1][1], x2, y2, x3, y3, cells = d3_geom_voronoiCells, iCell = cells.length, cell, iHalfEdge, halfEdges, nHalfEdges, start, end; + while (iCell--) { + cell = cells[iCell]; + if (!cell || !cell.prepare()) continue; + halfEdges = cell.edges; + nHalfEdges = halfEdges.length; + iHalfEdge = 0; + while (iHalfEdge < nHalfEdges) { + end = halfEdges[iHalfEdge].end(), x3 = end.x, y3 = end.y; + start = halfEdges[++iHalfEdge % nHalfEdges].start(), x2 = start.x, y2 = start.y; + if (abs(x3 - x2) > ε || abs(y3 - y2) > ε) { + halfEdges.splice(iHalfEdge, 0, new d3_geom_voronoiHalfEdge(d3_geom_voronoiCreateBorderEdge(cell.site, end, abs(x3 - x0) < ε && y1 - y3 > ε ? { + x: x0, + y: abs(x2 - x0) < ε ? y2 : y1 + } : abs(y3 - y1) < ε && x1 - x3 > ε ? { + x: abs(y2 - y1) < ε ? x2 : x1, + y: y1 + } : abs(x3 - x1) < ε && y3 - y0 > ε ? { + x: x1, + y: abs(x2 - x1) < ε ? y2 : y0 + } : abs(y3 - y0) < ε && x3 - x0 > ε ? { + x: abs(y2 - y0) < ε ? x2 : x0, + y: y0 + } : null), cell.site, null)); + ++nHalfEdges; + } + } + } + } + function d3_geom_voronoiHalfEdgeOrder(a, b) { + return b.angle - a.angle; + } + function d3_geom_voronoiCircle() { + d3_geom_voronoiRedBlackNode(this); + this.x = this.y = this.arc = this.site = this.cy = null; + } + function d3_geom_voronoiAttachCircle(arc) { + var lArc = arc.P, rArc = arc.N; + if (!lArc || !rArc) return; + var lSite = lArc.site, cSite = arc.site, rSite = rArc.site; + if (lSite === rSite) return; + var bx = cSite.x, by = cSite.y, ax = lSite.x - bx, ay = lSite.y - by, cx = rSite.x - bx, cy = rSite.y - by; + var d = 2 * (ax * cy - ay * cx); + if (d >= -ε2) return; + var ha = ax * ax + ay * ay, hc = cx * cx + cy * cy, x = (cy * ha - ay * hc) / d, y = (ax * hc - cx * ha) / d, cy = y + by; + var circle = d3_geom_voronoiCirclePool.pop() || new d3_geom_voronoiCircle(); + circle.arc = arc; + circle.site = cSite; + circle.x = x + bx; + circle.y = cy + Math.sqrt(x * x + y * y); + circle.cy = cy; + arc.circle = circle; + var before = null, node = d3_geom_voronoiCircles._; + while (node) { + if (circle.y < node.y || circle.y === node.y && circle.x <= node.x) { + if (node.L) node = node.L; else { + before = node.P; + break; + } + } else { + if (node.R) node = node.R; else { + before = node; + break; + } + } + } + d3_geom_voronoiCircles.insert(before, circle); + if (!before) d3_geom_voronoiFirstCircle = circle; + } + function d3_geom_voronoiDetachCircle(arc) { + var circle = arc.circle; + if (circle) { + if (!circle.P) d3_geom_voronoiFirstCircle = circle.N; + d3_geom_voronoiCircles.remove(circle); + d3_geom_voronoiCirclePool.push(circle); + d3_geom_voronoiRedBlackNode(circle); + arc.circle = null; + } + } + function d3_geom_voronoiClipEdges(extent) { + var edges = d3_geom_voronoiEdges, clip = d3_geom_clipLine(extent[0][0], extent[0][1], extent[1][0], extent[1][1]), i = edges.length, e; + while (i--) { + e = edges[i]; + if (!d3_geom_voronoiConnectEdge(e, extent) || !clip(e) || abs(e.a.x - e.b.x) < ε && abs(e.a.y - e.b.y) < ε) { + e.a = e.b = null; + 
edges.splice(i, 1); + } + } + } + function d3_geom_voronoiConnectEdge(edge, extent) { + var vb = edge.b; + if (vb) return true; + var va = edge.a, x0 = extent[0][0], x1 = extent[1][0], y0 = extent[0][1], y1 = extent[1][1], lSite = edge.l, rSite = edge.r, lx = lSite.x, ly = lSite.y, rx = rSite.x, ry = rSite.y, fx = (lx + rx) / 2, fy = (ly + ry) / 2, fm, fb; + if (ry === ly) { + if (fx < x0 || fx >= x1) return; + if (lx > rx) { + if (!va) va = { + x: fx, + y: y0 + }; else if (va.y >= y1) return; + vb = { + x: fx, + y: y1 + }; + } else { + if (!va) va = { + x: fx, + y: y1 + }; else if (va.y < y0) return; + vb = { + x: fx, + y: y0 + }; + } + } else { + fm = (lx - rx) / (ry - ly); + fb = fy - fm * fx; + if (fm < -1 || fm > 1) { + if (lx > rx) { + if (!va) va = { + x: (y0 - fb) / fm, + y: y0 + }; else if (va.y >= y1) return; + vb = { + x: (y1 - fb) / fm, + y: y1 + }; + } else { + if (!va) va = { + x: (y1 - fb) / fm, + y: y1 + }; else if (va.y < y0) return; + vb = { + x: (y0 - fb) / fm, + y: y0 + }; + } + } else { + if (ly < ry) { + if (!va) va = { + x: x0, + y: fm * x0 + fb + }; else if (va.x >= x1) return; + vb = { + x: x1, + y: fm * x1 + fb + }; + } else { + if (!va) va = { + x: x1, + y: fm * x1 + fb + }; else if (va.x < x0) return; + vb = { + x: x0, + y: fm * x0 + fb + }; + } + } + } + edge.a = va; + edge.b = vb; + return true; + } + function d3_geom_voronoiEdge(lSite, rSite) { + this.l = lSite; + this.r = rSite; + this.a = this.b = null; + } + function d3_geom_voronoiCreateEdge(lSite, rSite, va, vb) { + var edge = new d3_geom_voronoiEdge(lSite, rSite); + d3_geom_voronoiEdges.push(edge); + if (va) d3_geom_voronoiSetEdgeEnd(edge, lSite, rSite, va); + if (vb) d3_geom_voronoiSetEdgeEnd(edge, rSite, lSite, vb); + d3_geom_voronoiCells[lSite.i].edges.push(new d3_geom_voronoiHalfEdge(edge, lSite, rSite)); + d3_geom_voronoiCells[rSite.i].edges.push(new d3_geom_voronoiHalfEdge(edge, rSite, lSite)); + return edge; + } + function d3_geom_voronoiCreateBorderEdge(lSite, va, vb) { + var edge = new d3_geom_voronoiEdge(lSite, null); + edge.a = va; + edge.b = vb; + d3_geom_voronoiEdges.push(edge); + return edge; + } + function d3_geom_voronoiSetEdgeEnd(edge, lSite, rSite, vertex) { + if (!edge.a && !edge.b) { + edge.a = vertex; + edge.l = lSite; + edge.r = rSite; + } else if (edge.l === rSite) { + edge.b = vertex; + } else { + edge.a = vertex; + } + } + function d3_geom_voronoiHalfEdge(edge, lSite, rSite) { + var va = edge.a, vb = edge.b; + this.edge = edge; + this.site = lSite; + this.angle = rSite ? Math.atan2(rSite.y - lSite.y, rSite.x - lSite.x) : edge.l === lSite ? Math.atan2(vb.x - va.x, va.y - vb.y) : Math.atan2(va.x - vb.x, vb.y - va.y); + } + d3_geom_voronoiHalfEdge.prototype = { + start: function() { + return this.edge.l === this.site ? this.edge.a : this.edge.b; + }, + end: function() { + return this.edge.l === this.site ? 
this.edge.b : this.edge.a; + } + }; + function d3_geom_voronoiRedBlackTree() { + this._ = null; + } + function d3_geom_voronoiRedBlackNode(node) { + node.U = node.C = node.L = node.R = node.P = node.N = null; + } + d3_geom_voronoiRedBlackTree.prototype = { + insert: function(after, node) { + var parent, grandpa, uncle; + if (after) { + node.P = after; + node.N = after.N; + if (after.N) after.N.P = node; + after.N = node; + if (after.R) { + after = after.R; + while (after.L) after = after.L; + after.L = node; + } else { + after.R = node; + } + parent = after; + } else if (this._) { + after = d3_geom_voronoiRedBlackFirst(this._); + node.P = null; + node.N = after; + after.P = after.L = node; + parent = after; + } else { + node.P = node.N = null; + this._ = node; + parent = null; + } + node.L = node.R = null; + node.U = parent; + node.C = true; + after = node; + while (parent && parent.C) { + grandpa = parent.U; + if (parent === grandpa.L) { + uncle = grandpa.R; + if (uncle && uncle.C) { + parent.C = uncle.C = false; + grandpa.C = true; + after = grandpa; + } else { + if (after === parent.R) { + d3_geom_voronoiRedBlackRotateLeft(this, parent); + after = parent; + parent = after.U; + } + parent.C = false; + grandpa.C = true; + d3_geom_voronoiRedBlackRotateRight(this, grandpa); + } + } else { + uncle = grandpa.L; + if (uncle && uncle.C) { + parent.C = uncle.C = false; + grandpa.C = true; + after = grandpa; + } else { + if (after === parent.L) { + d3_geom_voronoiRedBlackRotateRight(this, parent); + after = parent; + parent = after.U; + } + parent.C = false; + grandpa.C = true; + d3_geom_voronoiRedBlackRotateLeft(this, grandpa); + } + } + parent = after.U; + } + this._.C = false; + }, + remove: function(node) { + if (node.N) node.N.P = node.P; + if (node.P) node.P.N = node.N; + node.N = node.P = null; + var parent = node.U, sibling, left = node.L, right = node.R, next, red; + if (!left) next = right; else if (!right) next = left; else next = d3_geom_voronoiRedBlackFirst(right); + if (parent) { + if (parent.L === node) parent.L = next; else parent.R = next; + } else { + this._ = next; + } + if (left && right) { + red = next.C; + next.C = node.C; + next.L = left; + left.U = next; + if (next !== right) { + parent = next.U; + next.U = node.U; + node = next.R; + parent.L = node; + next.R = right; + right.U = next; + } else { + next.U = parent; + parent = next; + node = next.R; + } + } else { + red = node.C; + node = next; + } + if (node) node.U = parent; + if (red) return; + if (node && node.C) { + node.C = false; + return; + } + do { + if (node === this._) break; + if (node === parent.L) { + sibling = parent.R; + if (sibling.C) { + sibling.C = false; + parent.C = true; + d3_geom_voronoiRedBlackRotateLeft(this, parent); + sibling = parent.R; + } + if (sibling.L && sibling.L.C || sibling.R && sibling.R.C) { + if (!sibling.R || !sibling.R.C) { + sibling.L.C = false; + sibling.C = true; + d3_geom_voronoiRedBlackRotateRight(this, sibling); + sibling = parent.R; + } + sibling.C = parent.C; + parent.C = sibling.R.C = false; + d3_geom_voronoiRedBlackRotateLeft(this, parent); + node = this._; + break; + } + } else { + sibling = parent.L; + if (sibling.C) { + sibling.C = false; + parent.C = true; + d3_geom_voronoiRedBlackRotateRight(this, parent); + sibling = parent.L; + } + if (sibling.L && sibling.L.C || sibling.R && sibling.R.C) { + if (!sibling.L || !sibling.L.C) { + sibling.R.C = false; + sibling.C = true; + d3_geom_voronoiRedBlackRotateLeft(this, sibling); + sibling = parent.L; + } + sibling.C = 
parent.C; + parent.C = sibling.L.C = false; + d3_geom_voronoiRedBlackRotateRight(this, parent); + node = this._; + break; + } + } + sibling.C = true; + node = parent; + parent = parent.U; + } while (!node.C); + if (node) node.C = false; + } + }; + function d3_geom_voronoiRedBlackRotateLeft(tree, node) { + var p = node, q = node.R, parent = p.U; + if (parent) { + if (parent.L === p) parent.L = q; else parent.R = q; + } else { + tree._ = q; + } + q.U = parent; + p.U = q; + p.R = q.L; + if (p.R) p.R.U = p; + q.L = p; + } + function d3_geom_voronoiRedBlackRotateRight(tree, node) { + var p = node, q = node.L, parent = p.U; + if (parent) { + if (parent.L === p) parent.L = q; else parent.R = q; + } else { + tree._ = q; + } + q.U = parent; + p.U = q; + p.L = q.R; + if (p.L) p.L.U = p; + q.R = p; + } + function d3_geom_voronoiRedBlackFirst(node) { + while (node.L) node = node.L; + return node; + } + function d3_geom_voronoi(sites, bbox) { + var site = sites.sort(d3_geom_voronoiVertexOrder).pop(), x0, y0, circle; + d3_geom_voronoiEdges = []; + d3_geom_voronoiCells = new Array(sites.length); + d3_geom_voronoiBeaches = new d3_geom_voronoiRedBlackTree(); + d3_geom_voronoiCircles = new d3_geom_voronoiRedBlackTree(); + while (true) { + circle = d3_geom_voronoiFirstCircle; + if (site && (!circle || site.y < circle.y || site.y === circle.y && site.x < circle.x)) { + if (site.x !== x0 || site.y !== y0) { + d3_geom_voronoiCells[site.i] = new d3_geom_voronoiCell(site); + d3_geom_voronoiAddBeach(site); + x0 = site.x, y0 = site.y; + } + site = sites.pop(); + } else if (circle) { + d3_geom_voronoiRemoveBeach(circle.arc); + } else { + break; + } + } + if (bbox) d3_geom_voronoiClipEdges(bbox), d3_geom_voronoiCloseCells(bbox); + var diagram = { + cells: d3_geom_voronoiCells, + edges: d3_geom_voronoiEdges + }; + d3_geom_voronoiBeaches = d3_geom_voronoiCircles = d3_geom_voronoiEdges = d3_geom_voronoiCells = null; + return diagram; + } + function d3_geom_voronoiVertexOrder(a, b) { + return b.y - a.y || b.x - a.x; + } + d3.geom.voronoi = function(points) { + var x = d3_geom_pointX, y = d3_geom_pointY, fx = x, fy = y, clipExtent = d3_geom_voronoiClipExtent; + if (points) return voronoi(points); + function voronoi(data) { + var polygons = new Array(data.length), x0 = clipExtent[0][0], y0 = clipExtent[0][1], x1 = clipExtent[1][0], y1 = clipExtent[1][1]; + d3_geom_voronoi(sites(data), clipExtent).cells.forEach(function(cell, i) { + var edges = cell.edges, site = cell.site, polygon = polygons[i] = edges.length ? edges.map(function(e) { + var s = e.start(); + return [ s.x, s.y ]; + }) : site.x >= x0 && site.x <= x1 && site.y >= y0 && site.y <= y1 ? [ [ x0, y1 ], [ x1, y1 ], [ x1, y0 ], [ x0, y0 ] ] : []; + polygon.point = data[i]; + }); + return polygons; + } + function sites(data) { + return data.map(function(d, i) { + return { + x: Math.round(fx(d, i) / ε) * ε, + y: Math.round(fy(d, i) / ε) * ε, + i: i + }; + }); + } + voronoi.links = function(data) { + return d3_geom_voronoi(sites(data)).edges.filter(function(edge) { + return edge.l && edge.r; + }).map(function(edge) { + return { + source: data[edge.l.i], + target: data[edge.r.i] + }; + }); + }; + voronoi.triangles = function(data) { + var triangles = []; + d3_geom_voronoi(sites(data)).cells.forEach(function(cell, i) { + var site = cell.site, edges = cell.edges.sort(d3_geom_voronoiHalfEdgeOrder), j = -1, m = edges.length, e0, s0, e1 = edges[m - 1].edge, s1 = e1.l === site ? e1.r : e1.l; + while (++j < m) { + e0 = e1; + s0 = s1; + e1 = edges[j].edge; + s1 = e1.l === site ? 
e1.r : e1.l; + if (i < s0.i && i < s1.i && d3_geom_voronoiTriangleArea(site, s0, s1) < 0) { + triangles.push([ data[i], data[s0.i], data[s1.i] ]); + } + } + }); + return triangles; + }; + voronoi.x = function(_) { + return arguments.length ? (fx = d3_functor(x = _), voronoi) : x; + }; + voronoi.y = function(_) { + return arguments.length ? (fy = d3_functor(y = _), voronoi) : y; + }; + voronoi.clipExtent = function(_) { + if (!arguments.length) return clipExtent === d3_geom_voronoiClipExtent ? null : clipExtent; + clipExtent = _ == null ? d3_geom_voronoiClipExtent : _; + return voronoi; + }; + voronoi.size = function(_) { + if (!arguments.length) return clipExtent === d3_geom_voronoiClipExtent ? null : clipExtent && clipExtent[1]; + return voronoi.clipExtent(_ && [ [ 0, 0 ], _ ]); + }; + return voronoi; + }; + var d3_geom_voronoiClipExtent = [ [ -1e6, -1e6 ], [ 1e6, 1e6 ] ]; + function d3_geom_voronoiTriangleArea(a, b, c) { + return (a.x - c.x) * (b.y - a.y) - (a.x - b.x) * (c.y - a.y); + } + d3.geom.delaunay = function(vertices) { + return d3.geom.voronoi().triangles(vertices); + }; + d3.geom.quadtree = function(points, x1, y1, x2, y2) { + var x = d3_geom_pointX, y = d3_geom_pointY, compat; + if (compat = arguments.length) { + x = d3_geom_quadtreeCompatX; + y = d3_geom_quadtreeCompatY; + if (compat === 3) { + y2 = y1; + x2 = x1; + y1 = x1 = 0; + } + return quadtree(points); + } + function quadtree(data) { + var d, fx = d3_functor(x), fy = d3_functor(y), xs, ys, i, n, x1_, y1_, x2_, y2_; + if (x1 != null) { + x1_ = x1, y1_ = y1, x2_ = x2, y2_ = y2; + } else { + x2_ = y2_ = -(x1_ = y1_ = Infinity); + xs = [], ys = []; + n = data.length; + if (compat) for (i = 0; i < n; ++i) { + d = data[i]; + if (d.x < x1_) x1_ = d.x; + if (d.y < y1_) y1_ = d.y; + if (d.x > x2_) x2_ = d.x; + if (d.y > y2_) y2_ = d.y; + xs.push(d.x); + ys.push(d.y); + } else for (i = 0; i < n; ++i) { + var x_ = +fx(d = data[i], i), y_ = +fy(d, i); + if (x_ < x1_) x1_ = x_; + if (y_ < y1_) y1_ = y_; + if (x_ > x2_) x2_ = x_; + if (y_ > y2_) y2_ = y_; + xs.push(x_); + ys.push(y_); + } + } + var dx = x2_ - x1_, dy = y2_ - y1_; + if (dx > dy) y2_ = y1_ + dx; else x2_ = x1_ + dy; + function insert(n, d, x, y, x1, y1, x2, y2) { + if (isNaN(x) || isNaN(y)) return; + if (n.leaf) { + var nx = n.x, ny = n.y; + if (nx != null) { + if (abs(nx - x) + abs(ny - y) < .01) { + insertChild(n, d, x, y, x1, y1, x2, y2); + } else { + var nPoint = n.point; + n.x = n.y = n.point = null; + insertChild(n, nPoint, nx, ny, x1, y1, x2, y2); + insertChild(n, d, x, y, x1, y1, x2, y2); + } + } else { + n.x = x, n.y = y, n.point = d; + } + } else { + insertChild(n, d, x, y, x1, y1, x2, y2); + } + } + function insertChild(n, d, x, y, x1, y1, x2, y2) { + var xm = (x1 + x2) * .5, ym = (y1 + y2) * .5, right = x >= xm, below = y >= ym, i = below << 1 | right; + n.leaf = false; + n = n.nodes[i] || (n.nodes[i] = d3_geom_quadtreeNode()); + if (right) x1 = xm; else x2 = xm; + if (below) y1 = ym; else y2 = ym; + insert(n, d, x, y, x1, y1, x2, y2); + } + var root = d3_geom_quadtreeNode(); + root.add = function(d) { + insert(root, d, +fx(d, ++i), +fy(d, i), x1_, y1_, x2_, y2_); + }; + root.visit = function(f) { + d3_geom_quadtreeVisit(f, root, x1_, y1_, x2_, y2_); + }; + root.find = function(point) { + return d3_geom_quadtreeFind(root, point[0], point[1], x1_, y1_, x2_, y2_); + }; + i = -1; + if (x1 == null) { + while (++i < n) { + insert(root, data[i], xs[i], ys[i], x1_, y1_, x2_, y2_); + } + --i; + } else data.forEach(root.add); + xs = ys = data = d = null; + return 
root; + } + quadtree.x = function(_) { + return arguments.length ? (x = _, quadtree) : x; + }; + quadtree.y = function(_) { + return arguments.length ? (y = _, quadtree) : y; + }; + quadtree.extent = function(_) { + if (!arguments.length) return x1 == null ? null : [ [ x1, y1 ], [ x2, y2 ] ]; + if (_ == null) x1 = y1 = x2 = y2 = null; else x1 = +_[0][0], y1 = +_[0][1], x2 = +_[1][0], + y2 = +_[1][1]; + return quadtree; + }; + quadtree.size = function(_) { + if (!arguments.length) return x1 == null ? null : [ x2 - x1, y2 - y1 ]; + if (_ == null) x1 = y1 = x2 = y2 = null; else x1 = y1 = 0, x2 = +_[0], y2 = +_[1]; + return quadtree; + }; + return quadtree; + }; + function d3_geom_quadtreeCompatX(d) { + return d.x; + } + function d3_geom_quadtreeCompatY(d) { + return d.y; + } + function d3_geom_quadtreeNode() { + return { + leaf: true, + nodes: [], + point: null, + x: null, + y: null + }; + } + function d3_geom_quadtreeVisit(f, node, x1, y1, x2, y2) { + if (!f(node, x1, y1, x2, y2)) { + var sx = (x1 + x2) * .5, sy = (y1 + y2) * .5, children = node.nodes; + if (children[0]) d3_geom_quadtreeVisit(f, children[0], x1, y1, sx, sy); + if (children[1]) d3_geom_quadtreeVisit(f, children[1], sx, y1, x2, sy); + if (children[2]) d3_geom_quadtreeVisit(f, children[2], x1, sy, sx, y2); + if (children[3]) d3_geom_quadtreeVisit(f, children[3], sx, sy, x2, y2); + } + } + function d3_geom_quadtreeFind(root, x, y, x0, y0, x3, y3) { + var minDistance2 = Infinity, closestPoint; + (function find(node, x1, y1, x2, y2) { + if (x1 > x3 || y1 > y3 || x2 < x0 || y2 < y0) return; + if (point = node.point) { + var point, dx = x - node.x, dy = y - node.y, distance2 = dx * dx + dy * dy; + if (distance2 < minDistance2) { + var distance = Math.sqrt(minDistance2 = distance2); + x0 = x - distance, y0 = y - distance; + x3 = x + distance, y3 = y + distance; + closestPoint = point; + } + } + var children = node.nodes, xm = (x1 + x2) * .5, ym = (y1 + y2) * .5, right = x >= xm, below = y >= ym; + for (var i = below << 1 | right, j = i + 4; i < j; ++i) { + if (node = children[i & 3]) switch (i & 3) { + case 0: + find(node, x1, y1, xm, ym); + break; + + case 1: + find(node, xm, y1, x2, ym); + break; + + case 2: + find(node, x1, ym, xm, y2); + break; + + case 3: + find(node, xm, ym, x2, y2); + break; + } + } + })(root, x0, y0, x3, y3); + return closestPoint; + } + d3.interpolateRgb = d3_interpolateRgb; + function d3_interpolateRgb(a, b) { + a = d3.rgb(a); + b = d3.rgb(b); + var ar = a.r, ag = a.g, ab = a.b, br = b.r - ar, bg = b.g - ag, bb = b.b - ab; + return function(t) { + return "#" + d3_rgb_hex(Math.round(ar + br * t)) + d3_rgb_hex(Math.round(ag + bg * t)) + d3_rgb_hex(Math.round(ab + bb * t)); + }; + } + d3.interpolateObject = d3_interpolateObject; + function d3_interpolateObject(a, b) { + var i = {}, c = {}, k; + for (k in a) { + if (k in b) { + i[k] = d3_interpolate(a[k], b[k]); + } else { + c[k] = a[k]; + } + } + for (k in b) { + if (!(k in a)) { + c[k] = b[k]; + } + } + return function(t) { + for (k in i) c[k] = i[k](t); + return c; + }; + } + d3.interpolateNumber = d3_interpolateNumber; + function d3_interpolateNumber(a, b) { + a = +a, b = +b; + return function(t) { + return a * (1 - t) + b * t; + }; + } + d3.interpolateString = d3_interpolateString; + function d3_interpolateString(a, b) { + var bi = d3_interpolate_numberA.lastIndex = d3_interpolate_numberB.lastIndex = 0, am, bm, bs, i = -1, s = [], q = []; + a = a + "", b = b + ""; + while ((am = d3_interpolate_numberA.exec(a)) && (bm = d3_interpolate_numberB.exec(b))) { + 
if ((bs = bm.index) > bi) { + bs = b.slice(bi, bs); + if (s[i]) s[i] += bs; else s[++i] = bs; + } + if ((am = am[0]) === (bm = bm[0])) { + if (s[i]) s[i] += bm; else s[++i] = bm; + } else { + s[++i] = null; + q.push({ + i: i, + x: d3_interpolateNumber(am, bm) + }); + } + bi = d3_interpolate_numberB.lastIndex; + } + if (bi < b.length) { + bs = b.slice(bi); + if (s[i]) s[i] += bs; else s[++i] = bs; + } + return s.length < 2 ? q[0] ? (b = q[0].x, function(t) { + return b(t) + ""; + }) : function() { + return b; + } : (b = q.length, function(t) { + for (var i = 0, o; i < b; ++i) s[(o = q[i]).i] = o.x(t); + return s.join(""); + }); + } + var d3_interpolate_numberA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g, d3_interpolate_numberB = new RegExp(d3_interpolate_numberA.source, "g"); + d3.interpolate = d3_interpolate; + function d3_interpolate(a, b) { + var i = d3.interpolators.length, f; + while (--i >= 0 && !(f = d3.interpolators[i](a, b))) ; + return f; + } + d3.interpolators = [ function(a, b) { + var t = typeof b; + return (t === "string" ? d3_rgb_names.has(b.toLowerCase()) || /^(#|rgb\(|hsl\()/i.test(b) ? d3_interpolateRgb : d3_interpolateString : b instanceof d3_color ? d3_interpolateRgb : Array.isArray(b) ? d3_interpolateArray : t === "object" && isNaN(b) ? d3_interpolateObject : d3_interpolateNumber)(a, b); + } ]; + d3.interpolateArray = d3_interpolateArray; + function d3_interpolateArray(a, b) { + var x = [], c = [], na = a.length, nb = b.length, n0 = Math.min(a.length, b.length), i; + for (i = 0; i < n0; ++i) x.push(d3_interpolate(a[i], b[i])); + for (;i < na; ++i) c[i] = a[i]; + for (;i < nb; ++i) c[i] = b[i]; + return function(t) { + for (i = 0; i < n0; ++i) c[i] = x[i](t); + return c; + }; + } + var d3_ease_default = function() { + return d3_identity; + }; + var d3_ease = d3.map({ + linear: d3_ease_default, + poly: d3_ease_poly, + quad: function() { + return d3_ease_quad; + }, + cubic: function() { + return d3_ease_cubic; + }, + sin: function() { + return d3_ease_sin; + }, + exp: function() { + return d3_ease_exp; + }, + circle: function() { + return d3_ease_circle; + }, + elastic: d3_ease_elastic, + back: d3_ease_back, + bounce: function() { + return d3_ease_bounce; + } + }); + var d3_ease_mode = d3.map({ + "in": d3_identity, + out: d3_ease_reverse, + "in-out": d3_ease_reflect, + "out-in": function(f) { + return d3_ease_reflect(d3_ease_reverse(f)); + } + }); + d3.ease = function(name) { + var i = name.indexOf("-"), t = i >= 0 ? name.slice(0, i) : name, m = i >= 0 ? name.slice(i + 1) : "in"; + t = d3_ease.get(t) || d3_ease_default; + m = d3_ease_mode.get(m) || d3_identity; + return d3_ease_clamp(m(t.apply(null, d3_arraySlice.call(arguments, 1)))); + }; + function d3_ease_clamp(f) { + return function(t) { + return t <= 0 ? 0 : t >= 1 ? 1 : f(t); + }; + } + function d3_ease_reverse(f) { + return function(t) { + return 1 - f(1 - t); + }; + } + function d3_ease_reflect(f) { + return function(t) { + return .5 * (t < .5 ? f(2 * t) : 2 - f(2 - 2 * t)); + }; + } + function d3_ease_quad(t) { + return t * t; + } + function d3_ease_cubic(t) { + return t * t * t; + } + function d3_ease_cubicInOut(t) { + if (t <= 0) return 0; + if (t >= 1) return 1; + var t2 = t * t, t3 = t2 * t; + return 4 * (t < .5 ? 
t3 : 3 * (t - t2) + t3 - .75); + } + function d3_ease_poly(e) { + return function(t) { + return Math.pow(t, e); + }; + } + function d3_ease_sin(t) { + return 1 - Math.cos(t * halfπ); + } + function d3_ease_exp(t) { + return Math.pow(2, 10 * (t - 1)); + } + function d3_ease_circle(t) { + return 1 - Math.sqrt(1 - t * t); + } + function d3_ease_elastic(a, p) { + var s; + if (arguments.length < 2) p = .45; + if (arguments.length) s = p / τ * Math.asin(1 / a); else a = 1, s = p / 4; + return function(t) { + return 1 + a * Math.pow(2, -10 * t) * Math.sin((t - s) * τ / p); + }; + } + function d3_ease_back(s) { + if (!s) s = 1.70158; + return function(t) { + return t * t * ((s + 1) * t - s); + }; + } + function d3_ease_bounce(t) { + return t < 1 / 2.75 ? 7.5625 * t * t : t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + .75 : t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + .9375 : 7.5625 * (t -= 2.625 / 2.75) * t + .984375; + } + d3.interpolateHcl = d3_interpolateHcl; + function d3_interpolateHcl(a, b) { + a = d3.hcl(a); + b = d3.hcl(b); + var ah = a.h, ac = a.c, al = a.l, bh = b.h - ah, bc = b.c - ac, bl = b.l - al; + if (isNaN(bc)) bc = 0, ac = isNaN(ac) ? b.c : ac; + if (isNaN(bh)) bh = 0, ah = isNaN(ah) ? b.h : ah; else if (bh > 180) bh -= 360; else if (bh < -180) bh += 360; + return function(t) { + return d3_hcl_lab(ah + bh * t, ac + bc * t, al + bl * t) + ""; + }; + } + d3.interpolateHsl = d3_interpolateHsl; + function d3_interpolateHsl(a, b) { + a = d3.hsl(a); + b = d3.hsl(b); + var ah = a.h, as = a.s, al = a.l, bh = b.h - ah, bs = b.s - as, bl = b.l - al; + if (isNaN(bs)) bs = 0, as = isNaN(as) ? b.s : as; + if (isNaN(bh)) bh = 0, ah = isNaN(ah) ? b.h : ah; else if (bh > 180) bh -= 360; else if (bh < -180) bh += 360; + return function(t) { + return d3_hsl_rgb(ah + bh * t, as + bs * t, al + bl * t) + ""; + }; + } + d3.interpolateLab = d3_interpolateLab; + function d3_interpolateLab(a, b) { + a = d3.lab(a); + b = d3.lab(b); + var al = a.l, aa = a.a, ab = a.b, bl = b.l - al, ba = b.a - aa, bb = b.b - ab; + return function(t) { + return d3_lab_rgb(al + bl * t, aa + ba * t, ab + bb * t) + ""; + }; + } + d3.interpolateRound = d3_interpolateRound; + function d3_interpolateRound(a, b) { + b -= a; + return function(t) { + return Math.round(a + b * t); + }; + } + d3.transform = function(string) { + var g = d3_document.createElementNS(d3.ns.prefix.svg, "g"); + return (d3.transform = function(string) { + if (string != null) { + g.setAttribute("transform", string); + var t = g.transform.baseVal.consolidate(); + } + return new d3_transform(t ? t.matrix : d3_transformIdentity); + })(string); + }; + function d3_transform(m) { + var r0 = [ m.a, m.b ], r1 = [ m.c, m.d ], kx = d3_transformNormalize(r0), kz = d3_transformDot(r0, r1), ky = d3_transformNormalize(d3_transformCombine(r1, r0, -kz)) || 0; + if (r0[0] * r1[1] < r1[0] * r0[1]) { + r0[0] *= -1; + r0[1] *= -1; + kx *= -1; + kz *= -1; + } + this.rotate = (kx ? Math.atan2(r0[1], r0[0]) : Math.atan2(-r1[0], r1[1])) * d3_degrees; + this.translate = [ m.e, m.f ]; + this.scale = [ kx, ky ]; + this.skew = ky ? 
Math.atan2(kz, ky) * d3_degrees : 0; + } + d3_transform.prototype.toString = function() { + return "translate(" + this.translate + ")rotate(" + this.rotate + ")skewX(" + this.skew + ")scale(" + this.scale + ")"; + }; + function d3_transformDot(a, b) { + return a[0] * b[0] + a[1] * b[1]; + } + function d3_transformNormalize(a) { + var k = Math.sqrt(d3_transformDot(a, a)); + if (k) { + a[0] /= k; + a[1] /= k; + } + return k; + } + function d3_transformCombine(a, b, k) { + a[0] += k * b[0]; + a[1] += k * b[1]; + return a; + } + var d3_transformIdentity = { + a: 1, + b: 0, + c: 0, + d: 1, + e: 0, + f: 0 + }; + d3.interpolateTransform = d3_interpolateTransform; + function d3_interpolateTransform(a, b) { + var s = [], q = [], n, A = d3.transform(a), B = d3.transform(b), ta = A.translate, tb = B.translate, ra = A.rotate, rb = B.rotate, wa = A.skew, wb = B.skew, ka = A.scale, kb = B.scale; + if (ta[0] != tb[0] || ta[1] != tb[1]) { + s.push("translate(", null, ",", null, ")"); + q.push({ + i: 1, + x: d3_interpolateNumber(ta[0], tb[0]) + }, { + i: 3, + x: d3_interpolateNumber(ta[1], tb[1]) + }); + } else if (tb[0] || tb[1]) { + s.push("translate(" + tb + ")"); + } else { + s.push(""); + } + if (ra != rb) { + if (ra - rb > 180) rb += 360; else if (rb - ra > 180) ra += 360; + q.push({ + i: s.push(s.pop() + "rotate(", null, ")") - 2, + x: d3_interpolateNumber(ra, rb) + }); + } else if (rb) { + s.push(s.pop() + "rotate(" + rb + ")"); + } + if (wa != wb) { + q.push({ + i: s.push(s.pop() + "skewX(", null, ")") - 2, + x: d3_interpolateNumber(wa, wb) + }); + } else if (wb) { + s.push(s.pop() + "skewX(" + wb + ")"); + } + if (ka[0] != kb[0] || ka[1] != kb[1]) { + n = s.push(s.pop() + "scale(", null, ",", null, ")"); + q.push({ + i: n - 4, + x: d3_interpolateNumber(ka[0], kb[0]) + }, { + i: n - 2, + x: d3_interpolateNumber(ka[1], kb[1]) + }); + } else if (kb[0] != 1 || kb[1] != 1) { + s.push(s.pop() + "scale(" + kb + ")"); + } + n = q.length; + return function(t) { + var i = -1, o; + while (++i < n) s[(o = q[i]).i] = o.x(t); + return s.join(""); + }; + } + function d3_uninterpolateNumber(a, b) { + b = (b -= a = +a) || 1 / b; + return function(x) { + return (x - a) / b; + }; + } + function d3_uninterpolateClamp(a, b) { + b = (b -= a = +a) || 1 / b; + return function(x) { + return Math.max(0, Math.min(1, (x - a) / b)); + }; + } + d3.layout = {}; + d3.layout.bundle = function() { + return function(links) { + var paths = [], i = -1, n = links.length; + while (++i < n) paths.push(d3_layout_bundlePath(links[i])); + return paths; + }; + }; + function d3_layout_bundlePath(link) { + var start = link.source, end = link.target, lca = d3_layout_bundleLeastCommonAncestor(start, end), points = [ start ]; + while (start !== lca) { + start = start.parent; + points.push(start); + } + var k = points.length; + while (end !== lca) { + points.splice(k, 0, end); + end = end.parent; + } + return points; + } + function d3_layout_bundleAncestors(node) { + var ancestors = [], parent = node.parent; + while (parent != null) { + ancestors.push(node); + node = parent; + parent = parent.parent; + } + ancestors.push(node); + return ancestors; + } + function d3_layout_bundleLeastCommonAncestor(a, b) { + if (a === b) return a; + var aNodes = d3_layout_bundleAncestors(a), bNodes = d3_layout_bundleAncestors(b), aNode = aNodes.pop(), bNode = bNodes.pop(), sharedNode = null; + while (aNode === bNode) { + sharedNode = aNode; + aNode = aNodes.pop(); + bNode = bNodes.pop(); + } + return sharedNode; + } + d3.layout.chord = function() { + var chord = 
{}, chords, groups, matrix, n, padding = 0, sortGroups, sortSubgroups, sortChords; + function relayout() { + var subgroups = {}, groupSums = [], groupIndex = d3.range(n), subgroupIndex = [], k, x, x0, i, j; + chords = []; + groups = []; + k = 0, i = -1; + while (++i < n) { + x = 0, j = -1; + while (++j < n) { + x += matrix[i][j]; + } + groupSums.push(x); + subgroupIndex.push(d3.range(n)); + k += x; + } + if (sortGroups) { + groupIndex.sort(function(a, b) { + return sortGroups(groupSums[a], groupSums[b]); + }); + } + if (sortSubgroups) { + subgroupIndex.forEach(function(d, i) { + d.sort(function(a, b) { + return sortSubgroups(matrix[i][a], matrix[i][b]); + }); + }); + } + k = (τ - padding * n) / k; + x = 0, i = -1; + while (++i < n) { + x0 = x, j = -1; + while (++j < n) { + var di = groupIndex[i], dj = subgroupIndex[di][j], v = matrix[di][dj], a0 = x, a1 = x += v * k; + subgroups[di + "-" + dj] = { + index: di, + subindex: dj, + startAngle: a0, + endAngle: a1, + value: v + }; + } + groups[di] = { + index: di, + startAngle: x0, + endAngle: x, + value: (x - x0) / k + }; + x += padding; + } + i = -1; + while (++i < n) { + j = i - 1; + while (++j < n) { + var source = subgroups[i + "-" + j], target = subgroups[j + "-" + i]; + if (source.value || target.value) { + chords.push(source.value < target.value ? { + source: target, + target: source + } : { + source: source, + target: target + }); + } + } + } + if (sortChords) resort(); + } + function resort() { + chords.sort(function(a, b) { + return sortChords((a.source.value + a.target.value) / 2, (b.source.value + b.target.value) / 2); + }); + } + chord.matrix = function(x) { + if (!arguments.length) return matrix; + n = (matrix = x) && matrix.length; + chords = groups = null; + return chord; + }; + chord.padding = function(x) { + if (!arguments.length) return padding; + padding = x; + chords = groups = null; + return chord; + }; + chord.sortGroups = function(x) { + if (!arguments.length) return sortGroups; + sortGroups = x; + chords = groups = null; + return chord; + }; + chord.sortSubgroups = function(x) { + if (!arguments.length) return sortSubgroups; + sortSubgroups = x; + chords = null; + return chord; + }; + chord.sortChords = function(x) { + if (!arguments.length) return sortChords; + sortChords = x; + if (chords) resort(); + return chord; + }; + chord.chords = function() { + if (!chords) relayout(); + return chords; + }; + chord.groups = function() { + if (!groups) relayout(); + return groups; + }; + return chord; + }; + d3.layout.force = function() { + var force = {}, event = d3.dispatch("start", "tick", "end"), size = [ 1, 1 ], drag, alpha, friction = .9, linkDistance = d3_layout_forceLinkDistance, linkStrength = d3_layout_forceLinkStrength, charge = -30, chargeDistance2 = d3_layout_forceChargeDistance2, gravity = .1, theta2 = .64, nodes = [], links = [], distances, strengths, charges; + function repulse(node) { + return function(quad, x1, _, x2) { + if (quad.point !== node) { + var dx = quad.cx - node.x, dy = quad.cy - node.y, dw = x2 - x1, dn = dx * dx + dy * dy; + if (dw * dw / theta2 < dn) { + if (dn < chargeDistance2) { + var k = quad.charge / dn; + node.px -= dx * k; + node.py -= dy * k; + } + return true; + } + if (quad.point && dn && dn < chargeDistance2) { + var k = quad.pointCharge / dn; + node.px -= dx * k; + node.py -= dy * k; + } + } + return !quad.charge; + }; + } + force.tick = function() { + if ((alpha *= .99) < .005) { + event.end({ + type: "end", + alpha: alpha = 0 + }); + return true; + } + var n = nodes.length, m = 
links.length, q, i, o, s, t, l, k, x, y; + for (i = 0; i < m; ++i) { + o = links[i]; + s = o.source; + t = o.target; + x = t.x - s.x; + y = t.y - s.y; + if (l = x * x + y * y) { + l = alpha * strengths[i] * ((l = Math.sqrt(l)) - distances[i]) / l; + x *= l; + y *= l; + t.x -= x * (k = s.weight / (t.weight + s.weight)); + t.y -= y * k; + s.x += x * (k = 1 - k); + s.y += y * k; + } + } + if (k = alpha * gravity) { + x = size[0] / 2; + y = size[1] / 2; + i = -1; + if (k) while (++i < n) { + o = nodes[i]; + o.x += (x - o.x) * k; + o.y += (y - o.y) * k; + } + } + if (charge) { + d3_layout_forceAccumulate(q = d3.geom.quadtree(nodes), alpha, charges); + i = -1; + while (++i < n) { + if (!(o = nodes[i]).fixed) { + q.visit(repulse(o)); + } + } + } + i = -1; + while (++i < n) { + o = nodes[i]; + if (o.fixed) { + o.x = o.px; + o.y = o.py; + } else { + o.x -= (o.px - (o.px = o.x)) * friction; + o.y -= (o.py - (o.py = o.y)) * friction; + } + } + event.tick({ + type: "tick", + alpha: alpha + }); + }; + force.nodes = function(x) { + if (!arguments.length) return nodes; + nodes = x; + return force; + }; + force.links = function(x) { + if (!arguments.length) return links; + links = x; + return force; + }; + force.size = function(x) { + if (!arguments.length) return size; + size = x; + return force; + }; + force.linkDistance = function(x) { + if (!arguments.length) return linkDistance; + linkDistance = typeof x === "function" ? x : +x; + return force; + }; + force.distance = force.linkDistance; + force.linkStrength = function(x) { + if (!arguments.length) return linkStrength; + linkStrength = typeof x === "function" ? x : +x; + return force; + }; + force.friction = function(x) { + if (!arguments.length) return friction; + friction = +x; + return force; + }; + force.charge = function(x) { + if (!arguments.length) return charge; + charge = typeof x === "function" ? 
x : +x; + return force; + }; + force.chargeDistance = function(x) { + if (!arguments.length) return Math.sqrt(chargeDistance2); + chargeDistance2 = x * x; + return force; + }; + force.gravity = function(x) { + if (!arguments.length) return gravity; + gravity = +x; + return force; + }; + force.theta = function(x) { + if (!arguments.length) return Math.sqrt(theta2); + theta2 = x * x; + return force; + }; + force.alpha = function(x) { + if (!arguments.length) return alpha; + x = +x; + if (alpha) { + if (x > 0) alpha = x; else alpha = 0; + } else if (x > 0) { + event.start({ + type: "start", + alpha: alpha = x + }); + d3.timer(force.tick); + } + return force; + }; + force.start = function() { + var i, n = nodes.length, m = links.length, w = size[0], h = size[1], neighbors, o; + for (i = 0; i < n; ++i) { + (o = nodes[i]).index = i; + o.weight = 0; + } + for (i = 0; i < m; ++i) { + o = links[i]; + if (typeof o.source == "number") o.source = nodes[o.source]; + if (typeof o.target == "number") o.target = nodes[o.target]; + ++o.source.weight; + ++o.target.weight; + } + for (i = 0; i < n; ++i) { + o = nodes[i]; + if (isNaN(o.x)) o.x = position("x", w); + if (isNaN(o.y)) o.y = position("y", h); + if (isNaN(o.px)) o.px = o.x; + if (isNaN(o.py)) o.py = o.y; + } + distances = []; + if (typeof linkDistance === "function") for (i = 0; i < m; ++i) distances[i] = +linkDistance.call(this, links[i], i); else for (i = 0; i < m; ++i) distances[i] = linkDistance; + strengths = []; + if (typeof linkStrength === "function") for (i = 0; i < m; ++i) strengths[i] = +linkStrength.call(this, links[i], i); else for (i = 0; i < m; ++i) strengths[i] = linkStrength; + charges = []; + if (typeof charge === "function") for (i = 0; i < n; ++i) charges[i] = +charge.call(this, nodes[i], i); else for (i = 0; i < n; ++i) charges[i] = charge; + function position(dimension, size) { + if (!neighbors) { + neighbors = new Array(n); + for (j = 0; j < n; ++j) { + neighbors[j] = []; + } + for (j = 0; j < m; ++j) { + var o = links[j]; + neighbors[o.source.index].push(o.target); + neighbors[o.target.index].push(o.source); + } + } + var candidates = neighbors[i], j = -1, l = candidates.length, x; + while (++j < l) if (!isNaN(x = candidates[j][dimension])) return x; + return Math.random() * size; + } + return force.resume(); + }; + force.resume = function() { + return force.alpha(.1); + }; + force.stop = function() { + return force.alpha(0); + }; + force.drag = function() { + if (!drag) drag = d3.behavior.drag().origin(d3_identity).on("dragstart.force", d3_layout_forceDragstart).on("drag.force", dragmove).on("dragend.force", d3_layout_forceDragend); + if (!arguments.length) return drag; + this.on("mouseover.force", d3_layout_forceMouseover).on("mouseout.force", d3_layout_forceMouseout).call(drag); + }; + function dragmove(d) { + d.px = d3.event.x, d.py = d3.event.y; + force.resume(); + } + return d3.rebind(force, event, "on"); + }; + function d3_layout_forceDragstart(d) { + d.fixed |= 2; + } + function d3_layout_forceDragend(d) { + d.fixed &= ~6; + } + function d3_layout_forceMouseover(d) { + d.fixed |= 4; + d.px = d.x, d.py = d.y; + } + function d3_layout_forceMouseout(d) { + d.fixed &= ~4; + } + function d3_layout_forceAccumulate(quad, alpha, charges) { + var cx = 0, cy = 0; + quad.charge = 0; + if (!quad.leaf) { + var nodes = quad.nodes, n = nodes.length, i = -1, c; + while (++i < n) { + c = nodes[i]; + if (c == null) continue; + d3_layout_forceAccumulate(c, alpha, charges); + quad.charge += c.charge; + cx += c.charge * c.cx; + cy += 
c.charge * c.cy; + } + } + if (quad.point) { + if (!quad.leaf) { + quad.point.x += Math.random() - .5; + quad.point.y += Math.random() - .5; + } + var k = alpha * charges[quad.point.index]; + quad.charge += quad.pointCharge = k; + cx += k * quad.point.x; + cy += k * quad.point.y; + } + quad.cx = cx / quad.charge; + quad.cy = cy / quad.charge; + } + var d3_layout_forceLinkDistance = 20, d3_layout_forceLinkStrength = 1, d3_layout_forceChargeDistance2 = Infinity; + d3.layout.hierarchy = function() { + var sort = d3_layout_hierarchySort, children = d3_layout_hierarchyChildren, value = d3_layout_hierarchyValue; + function hierarchy(root) { + var stack = [ root ], nodes = [], node; + root.depth = 0; + while ((node = stack.pop()) != null) { + nodes.push(node); + if ((childs = children.call(hierarchy, node, node.depth)) && (n = childs.length)) { + var n, childs, child; + while (--n >= 0) { + stack.push(child = childs[n]); + child.parent = node; + child.depth = node.depth + 1; + } + if (value) node.value = 0; + node.children = childs; + } else { + if (value) node.value = +value.call(hierarchy, node, node.depth) || 0; + delete node.children; + } + } + d3_layout_hierarchyVisitAfter(root, function(node) { + var childs, parent; + if (sort && (childs = node.children)) childs.sort(sort); + if (value && (parent = node.parent)) parent.value += node.value; + }); + return nodes; + } + hierarchy.sort = function(x) { + if (!arguments.length) return sort; + sort = x; + return hierarchy; + }; + hierarchy.children = function(x) { + if (!arguments.length) return children; + children = x; + return hierarchy; + }; + hierarchy.value = function(x) { + if (!arguments.length) return value; + value = x; + return hierarchy; + }; + hierarchy.revalue = function(root) { + if (value) { + d3_layout_hierarchyVisitBefore(root, function(node) { + if (node.children) node.value = 0; + }); + d3_layout_hierarchyVisitAfter(root, function(node) { + var parent; + if (!node.children) node.value = +value.call(hierarchy, node, node.depth) || 0; + if (parent = node.parent) parent.value += node.value; + }); + } + return root; + }; + return hierarchy; + }; + function d3_layout_hierarchyRebind(object, hierarchy) { + d3.rebind(object, hierarchy, "sort", "children", "value"); + object.nodes = object; + object.links = d3_layout_hierarchyLinks; + return object; + } + function d3_layout_hierarchyVisitBefore(node, callback) { + var nodes = [ node ]; + while ((node = nodes.pop()) != null) { + callback(node); + if ((children = node.children) && (n = children.length)) { + var n, children; + while (--n >= 0) nodes.push(children[n]); + } + } + } + function d3_layout_hierarchyVisitAfter(node, callback) { + var nodes = [ node ], nodes2 = []; + while ((node = nodes.pop()) != null) { + nodes2.push(node); + if ((children = node.children) && (n = children.length)) { + var i = -1, n, children; + while (++i < n) nodes.push(children[i]); + } + } + while ((node = nodes2.pop()) != null) { + callback(node); + } + } + function d3_layout_hierarchyChildren(d) { + return d.children; + } + function d3_layout_hierarchyValue(d) { + return d.value; + } + function d3_layout_hierarchySort(a, b) { + return b.value - a.value; + } + function d3_layout_hierarchyLinks(nodes) { + return d3.merge(nodes.map(function(parent) { + return (parent.children || []).map(function(child) { + return { + source: parent, + target: child + }; + }); + })); + } + d3.layout.partition = function() { + var hierarchy = d3.layout.hierarchy(), size = [ 1, 1 ]; + function position(node, x, dx, dy) { + var 
children = node.children; + node.x = x; + node.y = node.depth * dy; + node.dx = dx; + node.dy = dy; + if (children && (n = children.length)) { + var i = -1, n, c, d; + dx = node.value ? dx / node.value : 0; + while (++i < n) { + position(c = children[i], x, d = c.value * dx, dy); + x += d; + } + } + } + function depth(node) { + var children = node.children, d = 0; + if (children && (n = children.length)) { + var i = -1, n; + while (++i < n) d = Math.max(d, depth(children[i])); + } + return 1 + d; + } + function partition(d, i) { + var nodes = hierarchy.call(this, d, i); + position(nodes[0], 0, size[0], size[1] / depth(nodes[0])); + return nodes; + } + partition.size = function(x) { + if (!arguments.length) return size; + size = x; + return partition; + }; + return d3_layout_hierarchyRebind(partition, hierarchy); + }; + d3.layout.pie = function() { + var value = Number, sort = d3_layout_pieSortByValue, startAngle = 0, endAngle = τ, padAngle = 0; + function pie(data) { + var n = data.length, values = data.map(function(d, i) { + return +value.call(pie, d, i); + }), a = +(typeof startAngle === "function" ? startAngle.apply(this, arguments) : startAngle), da = (typeof endAngle === "function" ? endAngle.apply(this, arguments) : endAngle) - a, p = Math.min(Math.abs(da) / n, +(typeof padAngle === "function" ? padAngle.apply(this, arguments) : padAngle)), pa = p * (da < 0 ? -1 : 1), k = (da - n * pa) / d3.sum(values), index = d3.range(n), arcs = [], v; + if (sort != null) index.sort(sort === d3_layout_pieSortByValue ? function(i, j) { + return values[j] - values[i]; + } : function(i, j) { + return sort(data[i], data[j]); + }); + index.forEach(function(i) { + arcs[i] = { + data: data[i], + value: v = values[i], + startAngle: a, + endAngle: a += v * k + pa, + padAngle: p + }; + }); + return arcs; + } + pie.value = function(_) { + if (!arguments.length) return value; + value = _; + return pie; + }; + pie.sort = function(_) { + if (!arguments.length) return sort; + sort = _; + return pie; + }; + pie.startAngle = function(_) { + if (!arguments.length) return startAngle; + startAngle = _; + return pie; + }; + pie.endAngle = function(_) { + if (!arguments.length) return endAngle; + endAngle = _; + return pie; + }; + pie.padAngle = function(_) { + if (!arguments.length) return padAngle; + padAngle = _; + return pie; + }; + return pie; + }; + var d3_layout_pieSortByValue = {}; + d3.layout.stack = function() { + var values = d3_identity, order = d3_layout_stackOrderDefault, offset = d3_layout_stackOffsetZero, out = d3_layout_stackOut, x = d3_layout_stackX, y = d3_layout_stackY; + function stack(data, index) { + if (!(n = data.length)) return data; + var series = data.map(function(d, i) { + return values.call(stack, d, i); + }); + var points = series.map(function(d) { + return d.map(function(v, i) { + return [ x.call(stack, v, i), y.call(stack, v, i) ]; + }); + }); + var orders = order.call(stack, points, index); + series = d3.permute(series, orders); + points = d3.permute(points, orders); + var offsets = offset.call(stack, points, index); + var m = series[0].length, n, i, j, o; + for (j = 0; j < m; ++j) { + out.call(stack, series[0][j], o = offsets[j], points[0][j][1]); + for (i = 1; i < n; ++i) { + out.call(stack, series[i][j], o += points[i - 1][j][1], points[i][j][1]); + } + } + return data; + } + stack.values = function(x) { + if (!arguments.length) return values; + values = x; + return stack; + }; + stack.order = function(x) { + if (!arguments.length) return order; + order = typeof x === "function" ? 
x : d3_layout_stackOrders.get(x) || d3_layout_stackOrderDefault; + return stack; + }; + stack.offset = function(x) { + if (!arguments.length) return offset; + offset = typeof x === "function" ? x : d3_layout_stackOffsets.get(x) || d3_layout_stackOffsetZero; + return stack; + }; + stack.x = function(z) { + if (!arguments.length) return x; + x = z; + return stack; + }; + stack.y = function(z) { + if (!arguments.length) return y; + y = z; + return stack; + }; + stack.out = function(z) { + if (!arguments.length) return out; + out = z; + return stack; + }; + return stack; + }; + function d3_layout_stackX(d) { + return d.x; + } + function d3_layout_stackY(d) { + return d.y; + } + function d3_layout_stackOut(d, y0, y) { + d.y0 = y0; + d.y = y; + } + var d3_layout_stackOrders = d3.map({ + "inside-out": function(data) { + var n = data.length, i, j, max = data.map(d3_layout_stackMaxIndex), sums = data.map(d3_layout_stackReduceSum), index = d3.range(n).sort(function(a, b) { + return max[a] - max[b]; + }), top = 0, bottom = 0, tops = [], bottoms = []; + for (i = 0; i < n; ++i) { + j = index[i]; + if (top < bottom) { + top += sums[j]; + tops.push(j); + } else { + bottom += sums[j]; + bottoms.push(j); + } + } + return bottoms.reverse().concat(tops); + }, + reverse: function(data) { + return d3.range(data.length).reverse(); + }, + "default": d3_layout_stackOrderDefault + }); + var d3_layout_stackOffsets = d3.map({ + silhouette: function(data) { + var n = data.length, m = data[0].length, sums = [], max = 0, i, j, o, y0 = []; + for (j = 0; j < m; ++j) { + for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; + if (o > max) max = o; + sums.push(o); + } + for (j = 0; j < m; ++j) { + y0[j] = (max - sums[j]) / 2; + } + return y0; + }, + wiggle: function(data) { + var n = data.length, x = data[0], m = x.length, i, j, k, s1, s2, s3, dx, o, o0, y0 = []; + y0[0] = o = o0 = 0; + for (j = 1; j < m; ++j) { + for (i = 0, s1 = 0; i < n; ++i) s1 += data[i][j][1]; + for (i = 0, s2 = 0, dx = x[j][0] - x[j - 1][0]; i < n; ++i) { + for (k = 0, s3 = (data[i][j][1] - data[i][j - 1][1]) / (2 * dx); k < i; ++k) { + s3 += (data[k][j][1] - data[k][j - 1][1]) / dx; + } + s2 += s3 * data[i][j][1]; + } + y0[j] = o -= s1 ? 
s2 / s1 * dx : 0; + if (o < o0) o0 = o; + } + for (j = 0; j < m; ++j) y0[j] -= o0; + return y0; + }, + expand: function(data) { + var n = data.length, m = data[0].length, k = 1 / n, i, j, o, y0 = []; + for (j = 0; j < m; ++j) { + for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; + if (o) for (i = 0; i < n; i++) data[i][j][1] /= o; else for (i = 0; i < n; i++) data[i][j][1] = k; + } + for (j = 0; j < m; ++j) y0[j] = 0; + return y0; + }, + zero: d3_layout_stackOffsetZero + }); + function d3_layout_stackOrderDefault(data) { + return d3.range(data.length); + } + function d3_layout_stackOffsetZero(data) { + var j = -1, m = data[0].length, y0 = []; + while (++j < m) y0[j] = 0; + return y0; + } + function d3_layout_stackMaxIndex(array) { + var i = 1, j = 0, v = array[0][1], k, n = array.length; + for (;i < n; ++i) { + if ((k = array[i][1]) > v) { + j = i; + v = k; + } + } + return j; + } + function d3_layout_stackReduceSum(d) { + return d.reduce(d3_layout_stackSum, 0); + } + function d3_layout_stackSum(p, d) { + return p + d[1]; + } + d3.layout.histogram = function() { + var frequency = true, valuer = Number, ranger = d3_layout_histogramRange, binner = d3_layout_histogramBinSturges; + function histogram(data, i) { + var bins = [], values = data.map(valuer, this), range = ranger.call(this, values, i), thresholds = binner.call(this, range, values, i), bin, i = -1, n = values.length, m = thresholds.length - 1, k = frequency ? 1 : 1 / n, x; + while (++i < m) { + bin = bins[i] = []; + bin.dx = thresholds[i + 1] - (bin.x = thresholds[i]); + bin.y = 0; + } + if (m > 0) { + i = -1; + while (++i < n) { + x = values[i]; + if (x >= range[0] && x <= range[1]) { + bin = bins[d3.bisect(thresholds, x, 1, m) - 1]; + bin.y += k; + bin.push(data[i]); + } + } + } + return bins; + } + histogram.value = function(x) { + if (!arguments.length) return valuer; + valuer = x; + return histogram; + }; + histogram.range = function(x) { + if (!arguments.length) return ranger; + ranger = d3_functor(x); + return histogram; + }; + histogram.bins = function(x) { + if (!arguments.length) return binner; + binner = typeof x === "number" ? function(range) { + return d3_layout_histogramBinFixed(range, x); + } : d3_functor(x); + return histogram; + }; + histogram.frequency = function(x) { + if (!arguments.length) return frequency; + frequency = !!x; + return histogram; + }; + return histogram; + }; + function d3_layout_histogramBinSturges(range, values) { + return d3_layout_histogramBinFixed(range, Math.ceil(Math.log(values.length) / Math.LN2 + 1)); + } + function d3_layout_histogramBinFixed(range, n) { + var x = -1, b = +range[0], m = (range[1] - b) / n, f = []; + while (++x <= n) f[x] = m * x + b; + return f; + } + function d3_layout_histogramRange(values) { + return [ d3.min(values), d3.max(values) ]; + } + d3.layout.pack = function() { + var hierarchy = d3.layout.hierarchy().sort(d3_layout_packSort), padding = 0, size = [ 1, 1 ], radius; + function pack(d, i) { + var nodes = hierarchy.call(this, d, i), root = nodes[0], w = size[0], h = size[1], r = radius == null ? Math.sqrt : typeof radius === "function" ? radius : function() { + return radius; + }; + root.x = root.y = 0; + d3_layout_hierarchyVisitAfter(root, function(d) { + d.r = +r(d.value); + }); + d3_layout_hierarchyVisitAfter(root, d3_layout_packSiblings); + if (padding) { + var dr = padding * (radius ? 
1 : Math.max(2 * root.r / w, 2 * root.r / h)) / 2; + d3_layout_hierarchyVisitAfter(root, function(d) { + d.r += dr; + }); + d3_layout_hierarchyVisitAfter(root, d3_layout_packSiblings); + d3_layout_hierarchyVisitAfter(root, function(d) { + d.r -= dr; + }); + } + d3_layout_packTransform(root, w / 2, h / 2, radius ? 1 : 1 / Math.max(2 * root.r / w, 2 * root.r / h)); + return nodes; + } + pack.size = function(_) { + if (!arguments.length) return size; + size = _; + return pack; + }; + pack.radius = function(_) { + if (!arguments.length) return radius; + radius = _ == null || typeof _ === "function" ? _ : +_; + return pack; + }; + pack.padding = function(_) { + if (!arguments.length) return padding; + padding = +_; + return pack; + }; + return d3_layout_hierarchyRebind(pack, hierarchy); + }; + function d3_layout_packSort(a, b) { + return a.value - b.value; + } + function d3_layout_packInsert(a, b) { + var c = a._pack_next; + a._pack_next = b; + b._pack_prev = a; + b._pack_next = c; + c._pack_prev = b; + } + function d3_layout_packSplice(a, b) { + a._pack_next = b; + b._pack_prev = a; + } + function d3_layout_packIntersects(a, b) { + var dx = b.x - a.x, dy = b.y - a.y, dr = a.r + b.r; + return .999 * dr * dr > dx * dx + dy * dy; + } + function d3_layout_packSiblings(node) { + if (!(nodes = node.children) || !(n = nodes.length)) return; + var nodes, xMin = Infinity, xMax = -Infinity, yMin = Infinity, yMax = -Infinity, a, b, c, i, j, k, n; + function bound(node) { + xMin = Math.min(node.x - node.r, xMin); + xMax = Math.max(node.x + node.r, xMax); + yMin = Math.min(node.y - node.r, yMin); + yMax = Math.max(node.y + node.r, yMax); + } + nodes.forEach(d3_layout_packLink); + a = nodes[0]; + a.x = -a.r; + a.y = 0; + bound(a); + if (n > 1) { + b = nodes[1]; + b.x = b.r; + b.y = 0; + bound(b); + if (n > 2) { + c = nodes[2]; + d3_layout_packPlace(a, b, c); + bound(c); + d3_layout_packInsert(a, c); + a._pack_prev = c; + d3_layout_packInsert(c, b); + b = a._pack_next; + for (i = 3; i < n; i++) { + d3_layout_packPlace(a, b, c = nodes[i]); + var isect = 0, s1 = 1, s2 = 1; + for (j = b._pack_next; j !== b; j = j._pack_next, s1++) { + if (d3_layout_packIntersects(j, c)) { + isect = 1; + break; + } + } + if (isect == 1) { + for (k = a._pack_prev; k !== j._pack_prev; k = k._pack_prev, s2++) { + if (d3_layout_packIntersects(k, c)) { + break; + } + } + } + if (isect) { + if (s1 < s2 || s1 == s2 && b.r < a.r) d3_layout_packSplice(a, b = j); else d3_layout_packSplice(a = k, b); + i--; + } else { + d3_layout_packInsert(a, c); + b = c; + bound(c); + } + } + } + } + var cx = (xMin + xMax) / 2, cy = (yMin + yMax) / 2, cr = 0; + for (i = 0; i < n; i++) { + c = nodes[i]; + c.x -= cx; + c.y -= cy; + cr = Math.max(cr, c.r + Math.sqrt(c.x * c.x + c.y * c.y)); + } + node.r = cr; + nodes.forEach(d3_layout_packUnlink); + } + function d3_layout_packLink(node) { + node._pack_next = node._pack_prev = node; + } + function d3_layout_packUnlink(node) { + delete node._pack_next; + delete node._pack_prev; + } + function d3_layout_packTransform(node, x, y, k) { + var children = node.children; + node.x = x += k * node.x; + node.y = y += k * node.y; + node.r *= k; + if (children) { + var i = -1, n = children.length; + while (++i < n) d3_layout_packTransform(children[i], x, y, k); + } + } + function d3_layout_packPlace(a, b, c) { + var db = a.r + c.r, dx = b.x - a.x, dy = b.y - a.y; + if (db && (dx || dy)) { + var da = b.r + c.r, dc = dx * dx + dy * dy; + da *= da; + db *= db; + var x = .5 + (db - da) / (2 * dc), y = Math.sqrt(Math.max(0, 2 
* da * (db + dc) - (db -= dc) * db - da * da)) / (2 * dc); + c.x = a.x + x * dx + y * dy; + c.y = a.y + x * dy - y * dx; + } else { + c.x = a.x + db; + c.y = a.y; + } + } + d3.layout.tree = function() { + var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ], nodeSize = null; + function tree(d, i) { + var nodes = hierarchy.call(this, d, i), root0 = nodes[0], root1 = wrapTree(root0); + d3_layout_hierarchyVisitAfter(root1, firstWalk), root1.parent.m = -root1.z; + d3_layout_hierarchyVisitBefore(root1, secondWalk); + if (nodeSize) d3_layout_hierarchyVisitBefore(root0, sizeNode); else { + var left = root0, right = root0, bottom = root0; + d3_layout_hierarchyVisitBefore(root0, function(node) { + if (node.x < left.x) left = node; + if (node.x > right.x) right = node; + if (node.depth > bottom.depth) bottom = node; + }); + var tx = separation(left, right) / 2 - left.x, kx = size[0] / (right.x + separation(right, left) / 2 + tx), ky = size[1] / (bottom.depth || 1); + d3_layout_hierarchyVisitBefore(root0, function(node) { + node.x = (node.x + tx) * kx; + node.y = node.depth * ky; + }); + } + return nodes; + } + function wrapTree(root0) { + var root1 = { + A: null, + children: [ root0 ] + }, queue = [ root1 ], node1; + while ((node1 = queue.pop()) != null) { + for (var children = node1.children, child, i = 0, n = children.length; i < n; ++i) { + queue.push((children[i] = child = { + _: children[i], + parent: node1, + children: (child = children[i].children) && child.slice() || [], + A: null, + a: null, + z: 0, + m: 0, + c: 0, + s: 0, + t: null, + i: i + }).a = child); + } + } + return root1.children[0]; + } + function firstWalk(v) { + var children = v.children, siblings = v.parent.children, w = v.i ? siblings[v.i - 1] : null; + if (children.length) { + d3_layout_treeShift(v); + var midpoint = (children[0].z + children[children.length - 1].z) / 2; + if (w) { + v.z = w.z + separation(v._, w._); + v.m = v.z - midpoint; + } else { + v.z = midpoint; + } + } else if (w) { + v.z = w.z + separation(v._, w._); + } + v.parent.A = apportion(v, w, v.parent.A || siblings[0]); + } + function secondWalk(v) { + v._.x = v.z + v.parent.m; + v.m += v.parent.m; + } + function apportion(v, w, ancestor) { + if (w) { + var vip = v, vop = v, vim = w, vom = vip.parent.children[0], sip = vip.m, sop = vop.m, sim = vim.m, som = vom.m, shift; + while (vim = d3_layout_treeRight(vim), vip = d3_layout_treeLeft(vip), vim && vip) { + vom = d3_layout_treeLeft(vom); + vop = d3_layout_treeRight(vop); + vop.a = v; + shift = vim.z + sim - vip.z - sip + separation(vim._, vip._); + if (shift > 0) { + d3_layout_treeMove(d3_layout_treeAncestor(vim, v, ancestor), v, shift); + sip += shift; + sop += shift; + } + sim += vim.m; + sip += vip.m; + som += vom.m; + sop += vop.m; + } + if (vim && !d3_layout_treeRight(vop)) { + vop.t = vim; + vop.m += sim - sop; + } + if (vip && !d3_layout_treeLeft(vom)) { + vom.t = vip; + vom.m += sip - som; + ancestor = v; + } + } + return ancestor; + } + function sizeNode(node) { + node.x *= size[0]; + node.y = node.depth * size[1]; + } + tree.separation = function(x) { + if (!arguments.length) return separation; + separation = x; + return tree; + }; + tree.size = function(x) { + if (!arguments.length) return nodeSize ? null : size; + nodeSize = (size = x) == null ? sizeNode : null; + return tree; + }; + tree.nodeSize = function(x) { + if (!arguments.length) return nodeSize ? size : null; + nodeSize = (size = x) == null ? 
null : sizeNode; + return tree; + }; + return d3_layout_hierarchyRebind(tree, hierarchy); + }; + function d3_layout_treeSeparation(a, b) { + return a.parent == b.parent ? 1 : 2; + } + function d3_layout_treeLeft(v) { + var children = v.children; + return children.length ? children[0] : v.t; + } + function d3_layout_treeRight(v) { + var children = v.children, n; + return (n = children.length) ? children[n - 1] : v.t; + } + function d3_layout_treeMove(wm, wp, shift) { + var change = shift / (wp.i - wm.i); + wp.c -= change; + wp.s += shift; + wm.c += change; + wp.z += shift; + wp.m += shift; + } + function d3_layout_treeShift(v) { + var shift = 0, change = 0, children = v.children, i = children.length, w; + while (--i >= 0) { + w = children[i]; + w.z += shift; + w.m += shift; + shift += w.s + (change += w.c); + } + } + function d3_layout_treeAncestor(vim, v, ancestor) { + return vim.a.parent === v.parent ? vim.a : ancestor; + } + d3.layout.cluster = function() { + var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ], nodeSize = false; + function cluster(d, i) { + var nodes = hierarchy.call(this, d, i), root = nodes[0], previousNode, x = 0; + d3_layout_hierarchyVisitAfter(root, function(node) { + var children = node.children; + if (children && children.length) { + node.x = d3_layout_clusterX(children); + node.y = d3_layout_clusterY(children); + } else { + node.x = previousNode ? x += separation(node, previousNode) : 0; + node.y = 0; + previousNode = node; + } + }); + var left = d3_layout_clusterLeft(root), right = d3_layout_clusterRight(root), x0 = left.x - separation(left, right) / 2, x1 = right.x + separation(right, left) / 2; + d3_layout_hierarchyVisitAfter(root, nodeSize ? function(node) { + node.x = (node.x - root.x) * size[0]; + node.y = (root.y - node.y) * size[1]; + } : function(node) { + node.x = (node.x - x0) / (x1 - x0) * size[0]; + node.y = (1 - (root.y ? node.y / root.y : 1)) * size[1]; + }); + return nodes; + } + cluster.separation = function(x) { + if (!arguments.length) return separation; + separation = x; + return cluster; + }; + cluster.size = function(x) { + if (!arguments.length) return nodeSize ? null : size; + nodeSize = (size = x) == null; + return cluster; + }; + cluster.nodeSize = function(x) { + if (!arguments.length) return nodeSize ? size : null; + nodeSize = (size = x) != null; + return cluster; + }; + return d3_layout_hierarchyRebind(cluster, hierarchy); + }; + function d3_layout_clusterY(children) { + return 1 + d3.max(children, function(child) { + return child.y; + }); + } + function d3_layout_clusterX(children) { + return children.reduce(function(x, child) { + return x + child.x; + }, 0) / children.length; + } + function d3_layout_clusterLeft(node) { + var children = node.children; + return children && children.length ? d3_layout_clusterLeft(children[0]) : node; + } + function d3_layout_clusterRight(node) { + var children = node.children, n; + return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node; + } + d3.layout.treemap = function() { + var hierarchy = d3.layout.hierarchy(), round = Math.round, size = [ 1, 1 ], padding = null, pad = d3_layout_treemapPadNull, sticky = false, stickies, mode = "squarify", ratio = .5 * (1 + Math.sqrt(5)); + function scale(children, k) { + var i = -1, n = children.length, child, area; + while (++i < n) { + area = (child = children[i]).value * (k < 0 ? 0 : k); + child.area = isNaN(area) || area <= 0 ? 
0 : area; + } + } + function squarify(node) { + var children = node.children; + if (children && children.length) { + var rect = pad(node), row = [], remaining = children.slice(), child, best = Infinity, score, u = mode === "slice" ? rect.dx : mode === "dice" ? rect.dy : mode === "slice-dice" ? node.depth & 1 ? rect.dy : rect.dx : Math.min(rect.dx, rect.dy), n; + scale(remaining, rect.dx * rect.dy / node.value); + row.area = 0; + while ((n = remaining.length) > 0) { + row.push(child = remaining[n - 1]); + row.area += child.area; + if (mode !== "squarify" || (score = worst(row, u)) <= best) { + remaining.pop(); + best = score; + } else { + row.area -= row.pop().area; + position(row, u, rect, false); + u = Math.min(rect.dx, rect.dy); + row.length = row.area = 0; + best = Infinity; + } + } + if (row.length) { + position(row, u, rect, true); + row.length = row.area = 0; + } + children.forEach(squarify); + } + } + function stickify(node) { + var children = node.children; + if (children && children.length) { + var rect = pad(node), remaining = children.slice(), child, row = []; + scale(remaining, rect.dx * rect.dy / node.value); + row.area = 0; + while (child = remaining.pop()) { + row.push(child); + row.area += child.area; + if (child.z != null) { + position(row, child.z ? rect.dx : rect.dy, rect, !remaining.length); + row.length = row.area = 0; + } + } + children.forEach(stickify); + } + } + function worst(row, u) { + var s = row.area, r, rmax = 0, rmin = Infinity, i = -1, n = row.length; + while (++i < n) { + if (!(r = row[i].area)) continue; + if (r < rmin) rmin = r; + if (r > rmax) rmax = r; + } + s *= s; + u *= u; + return s ? Math.max(u * rmax * ratio / s, s / (u * rmin * ratio)) : Infinity; + } + function position(row, u, rect, flush) { + var i = -1, n = row.length, x = rect.x, y = rect.y, v = u ? round(row.area / u) : 0, o; + if (u == rect.dx) { + if (flush || v > rect.dy) v = rect.dy; + while (++i < n) { + o = row[i]; + o.x = x; + o.y = y; + o.dy = v; + x += o.dx = Math.min(rect.x + rect.dx - x, v ? round(o.area / v) : 0); + } + o.z = true; + o.dx += rect.x + rect.dx - x; + rect.y += v; + rect.dy -= v; + } else { + if (flush || v > rect.dx) v = rect.dx; + while (++i < n) { + o = row[i]; + o.x = x; + o.y = y; + o.dx = v; + y += o.dy = Math.min(rect.y + rect.dy - y, v ? round(o.area / v) : 0); + } + o.z = false; + o.dy += rect.y + rect.dy - y; + rect.x += v; + rect.dx -= v; + } + } + function treemap(d) { + var nodes = stickies || hierarchy(d), root = nodes[0]; + root.x = 0; + root.y = 0; + root.dx = size[0]; + root.dy = size[1]; + if (stickies) hierarchy.revalue(root); + scale([ root ], root.dx * root.dy / root.value); + (stickies ? stickify : squarify)(root); + if (sticky) stickies = nodes; + return nodes; + } + treemap.size = function(x) { + if (!arguments.length) return size; + size = x; + return treemap; + }; + treemap.padding = function(x) { + if (!arguments.length) return padding; + function padFunction(node) { + var p = x.call(treemap, node, node.depth); + return p == null ? d3_layout_treemapPadNull(node) : d3_layout_treemapPad(node, typeof p === "number" ? [ p, p, p, p ] : p); + } + function padConstant(node) { + return d3_layout_treemapPad(node, x); + } + var type; + pad = (padding = x) == null ? d3_layout_treemapPadNull : (type = typeof x) === "function" ? padFunction : type === "number" ? (x = [ x, x, x, x ], + padConstant) : padConstant; + return treemap; + }; + treemap.round = function(x) { + if (!arguments.length) return round != Number; + round = x ? 
Math.round : Number; + return treemap; + }; + treemap.sticky = function(x) { + if (!arguments.length) return sticky; + sticky = x; + stickies = null; + return treemap; + }; + treemap.ratio = function(x) { + if (!arguments.length) return ratio; + ratio = x; + return treemap; + }; + treemap.mode = function(x) { + if (!arguments.length) return mode; + mode = x + ""; + return treemap; + }; + return d3_layout_hierarchyRebind(treemap, hierarchy); + }; + function d3_layout_treemapPadNull(node) { + return { + x: node.x, + y: node.y, + dx: node.dx, + dy: node.dy + }; + } + function d3_layout_treemapPad(node, padding) { + var x = node.x + padding[3], y = node.y + padding[0], dx = node.dx - padding[1] - padding[3], dy = node.dy - padding[0] - padding[2]; + if (dx < 0) { + x += dx / 2; + dx = 0; + } + if (dy < 0) { + y += dy / 2; + dy = 0; + } + return { + x: x, + y: y, + dx: dx, + dy: dy + }; + } + d3.random = { + normal: function(µ, σ) { + var n = arguments.length; + if (n < 2) σ = 1; + if (n < 1) µ = 0; + return function() { + var x, y, r; + do { + x = Math.random() * 2 - 1; + y = Math.random() * 2 - 1; + r = x * x + y * y; + } while (!r || r > 1); + return µ + σ * x * Math.sqrt(-2 * Math.log(r) / r); + }; + }, + logNormal: function() { + var random = d3.random.normal.apply(d3, arguments); + return function() { + return Math.exp(random()); + }; + }, + bates: function(m) { + var random = d3.random.irwinHall(m); + return function() { + return random() / m; + }; + }, + irwinHall: function(m) { + return function() { + for (var s = 0, j = 0; j < m; j++) s += Math.random(); + return s; + }; + } + }; + d3.scale = {}; + function d3_scaleExtent(domain) { + var start = domain[0], stop = domain[domain.length - 1]; + return start < stop ? [ start, stop ] : [ stop, start ]; + } + function d3_scaleRange(scale) { + return scale.rangeExtent ? scale.rangeExtent() : d3_scaleExtent(scale.range()); + } + function d3_scale_bilinear(domain, range, uninterpolate, interpolate) { + var u = uninterpolate(domain[0], domain[1]), i = interpolate(range[0], range[1]); + return function(x) { + return i(u(x)); + }; + } + function d3_scale_nice(domain, nice) { + var i0 = 0, i1 = domain.length - 1, x0 = domain[i0], x1 = domain[i1], dx; + if (x1 < x0) { + dx = i0, i0 = i1, i1 = dx; + dx = x0, x0 = x1, x1 = dx; + } + domain[i0] = nice.floor(x0); + domain[i1] = nice.ceil(x1); + return domain; + } + function d3_scale_niceStep(step) { + return step ? { + floor: function(x) { + return Math.floor(x / step) * step; + }, + ceil: function(x) { + return Math.ceil(x / step) * step; + } + } : d3_scale_niceIdentity; + } + var d3_scale_niceIdentity = { + floor: d3_identity, + ceil: d3_identity + }; + function d3_scale_polylinear(domain, range, uninterpolate, interpolate) { + var u = [], i = [], j = 0, k = Math.min(domain.length, range.length) - 1; + if (domain[k] < domain[0]) { + domain = domain.slice().reverse(); + range = range.slice().reverse(); + } + while (++j <= k) { + u.push(uninterpolate(domain[j - 1], domain[j])); + i.push(interpolate(range[j - 1], range[j])); + } + return function(x) { + var j = d3.bisect(domain, x, 1, k) - 1; + return i[j](u[j](x)); + }; + } + d3.scale.linear = function() { + return d3_scale_linear([ 0, 1 ], [ 0, 1 ], d3_interpolate, false); + }; + function d3_scale_linear(domain, range, interpolate, clamp) { + var output, input; + function rescale() { + var linear = Math.min(domain.length, range.length) > 2 ? d3_scale_polylinear : d3_scale_bilinear, uninterpolate = clamp ? 
d3_uninterpolateClamp : d3_uninterpolateNumber; + output = linear(domain, range, uninterpolate, interpolate); + input = linear(range, domain, uninterpolate, d3_interpolate); + return scale; + } + function scale(x) { + return output(x); + } + scale.invert = function(y) { + return input(y); + }; + scale.domain = function(x) { + if (!arguments.length) return domain; + domain = x.map(Number); + return rescale(); + }; + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + return rescale(); + }; + scale.rangeRound = function(x) { + return scale.range(x).interpolate(d3_interpolateRound); + }; + scale.clamp = function(x) { + if (!arguments.length) return clamp; + clamp = x; + return rescale(); + }; + scale.interpolate = function(x) { + if (!arguments.length) return interpolate; + interpolate = x; + return rescale(); + }; + scale.ticks = function(m) { + return d3_scale_linearTicks(domain, m); + }; + scale.tickFormat = function(m, format) { + return d3_scale_linearTickFormat(domain, m, format); + }; + scale.nice = function(m) { + d3_scale_linearNice(domain, m); + return rescale(); + }; + scale.copy = function() { + return d3_scale_linear(domain, range, interpolate, clamp); + }; + return rescale(); + } + function d3_scale_linearRebind(scale, linear) { + return d3.rebind(scale, linear, "range", "rangeRound", "interpolate", "clamp"); + } + function d3_scale_linearNice(domain, m) { + return d3_scale_nice(domain, d3_scale_niceStep(d3_scale_linearTickRange(domain, m)[2])); + } + function d3_scale_linearTickRange(domain, m) { + if (m == null) m = 10; + var extent = d3_scaleExtent(domain), span = extent[1] - extent[0], step = Math.pow(10, Math.floor(Math.log(span / m) / Math.LN10)), err = m / span * step; + if (err <= .15) step *= 10; else if (err <= .35) step *= 5; else if (err <= .75) step *= 2; + extent[0] = Math.ceil(extent[0] / step) * step; + extent[1] = Math.floor(extent[1] / step) * step + step * .5; + extent[2] = step; + return extent; + } + function d3_scale_linearTicks(domain, m) { + return d3.range.apply(d3, d3_scale_linearTickRange(domain, m)); + } + function d3_scale_linearTickFormat(domain, m, format) { + var range = d3_scale_linearTickRange(domain, m); + if (format) { + var match = d3_format_re.exec(format); + match.shift(); + if (match[8] === "s") { + var prefix = d3.formatPrefix(Math.max(abs(range[0]), abs(range[1]))); + if (!match[7]) match[7] = "." + d3_scale_linearPrecision(prefix.scale(range[2])); + match[8] = "f"; + format = d3.format(match.join("")); + return function(d) { + return format(prefix.scale(d)) + prefix.symbol; + }; + } + if (!match[7]) match[7] = "." + d3_scale_linearFormatPrecision(match[8], range); + format = match.join(""); + } else { + format = ",." + d3_scale_linearPrecision(range[2]) + "f"; + } + return d3.format(format); + } + var d3_scale_linearFormatSignificant = { + s: 1, + g: 1, + p: 1, + r: 1, + e: 1 + }; + function d3_scale_linearPrecision(value) { + return -Math.floor(Math.log(value) / Math.LN10 + .01); + } + function d3_scale_linearFormatPrecision(type, range) { + var p = d3_scale_linearPrecision(range[2]); + return type in d3_scale_linearFormatSignificant ? Math.abs(p - d3_scale_linearPrecision(Math.max(abs(range[0]), abs(range[1])))) + +(type !== "e") : p - (type === "%") * 2; + } + d3.scale.log = function() { + return d3_scale_log(d3.scale.linear().domain([ 0, 1 ]), 10, true, [ 1, 10 ]); + }; + function d3_scale_log(linear, base, positive, domain) { + function log(x) { + return (positive ? Math.log(x < 0 ? 
0 : x) : -Math.log(x > 0 ? 0 : -x)) / Math.log(base); + } + function pow(x) { + return positive ? Math.pow(base, x) : -Math.pow(base, -x); + } + function scale(x) { + return linear(log(x)); + } + scale.invert = function(x) { + return pow(linear.invert(x)); + }; + scale.domain = function(x) { + if (!arguments.length) return domain; + positive = x[0] >= 0; + linear.domain((domain = x.map(Number)).map(log)); + return scale; + }; + scale.base = function(_) { + if (!arguments.length) return base; + base = +_; + linear.domain(domain.map(log)); + return scale; + }; + scale.nice = function() { + var niced = d3_scale_nice(domain.map(log), positive ? Math : d3_scale_logNiceNegative); + linear.domain(niced); + domain = niced.map(pow); + return scale; + }; + scale.ticks = function() { + var extent = d3_scaleExtent(domain), ticks = [], u = extent[0], v = extent[1], i = Math.floor(log(u)), j = Math.ceil(log(v)), n = base % 1 ? 2 : base; + if (isFinite(j - i)) { + if (positive) { + for (;i < j; i++) for (var k = 1; k < n; k++) ticks.push(pow(i) * k); + ticks.push(pow(i)); + } else { + ticks.push(pow(i)); + for (;i++ < j; ) for (var k = n - 1; k > 0; k--) ticks.push(pow(i) * k); + } + for (i = 0; ticks[i] < u; i++) {} + for (j = ticks.length; ticks[j - 1] > v; j--) {} + ticks = ticks.slice(i, j); + } + return ticks; + }; + scale.tickFormat = function(n, format) { + if (!arguments.length) return d3_scale_logFormat; + if (arguments.length < 2) format = d3_scale_logFormat; else if (typeof format !== "function") format = d3.format(format); + var k = Math.max(.1, n / scale.ticks().length), f = positive ? (e = 1e-12, Math.ceil) : (e = -1e-12, + Math.floor), e; + return function(d) { + return d / pow(f(log(d) + e)) <= k ? format(d) : ""; + }; + }; + scale.copy = function() { + return d3_scale_log(linear.copy(), base, positive, domain); + }; + return d3_scale_linearRebind(scale, linear); + } + var d3_scale_logFormat = d3.format(".0e"), d3_scale_logNiceNegative = { + floor: function(x) { + return -Math.ceil(-x); + }, + ceil: function(x) { + return -Math.floor(-x); + } + }; + d3.scale.pow = function() { + return d3_scale_pow(d3.scale.linear(), 1, [ 0, 1 ]); + }; + function d3_scale_pow(linear, exponent, domain) { + var powp = d3_scale_powPow(exponent), powb = d3_scale_powPow(1 / exponent); + function scale(x) { + return linear(powp(x)); + } + scale.invert = function(x) { + return powb(linear.invert(x)); + }; + scale.domain = function(x) { + if (!arguments.length) return domain; + linear.domain((domain = x.map(Number)).map(powp)); + return scale; + }; + scale.ticks = function(m) { + return d3_scale_linearTicks(domain, m); + }; + scale.tickFormat = function(m, format) { + return d3_scale_linearTickFormat(domain, m, format); + }; + scale.nice = function(m) { + return scale.domain(d3_scale_linearNice(domain, m)); + }; + scale.exponent = function(x) { + if (!arguments.length) return exponent; + powp = d3_scale_powPow(exponent = x); + powb = d3_scale_powPow(1 / exponent); + linear.domain(domain.map(powp)); + return scale; + }; + scale.copy = function() { + return d3_scale_pow(linear.copy(), exponent, domain); + }; + return d3_scale_linearRebind(scale, linear); + } + function d3_scale_powPow(e) { + return function(x) { + return x < 0 ? 
-Math.pow(-x, e) : Math.pow(x, e); + }; + } + d3.scale.sqrt = function() { + return d3.scale.pow().exponent(.5); + }; + d3.scale.ordinal = function() { + return d3_scale_ordinal([], { + t: "range", + a: [ [] ] + }); + }; + function d3_scale_ordinal(domain, ranger) { + var index, range, rangeBand; + function scale(x) { + return range[((index.get(x) || (ranger.t === "range" ? index.set(x, domain.push(x)) : NaN)) - 1) % range.length]; + } + function steps(start, step) { + return d3.range(domain.length).map(function(i) { + return start + step * i; + }); + } + scale.domain = function(x) { + if (!arguments.length) return domain; + domain = []; + index = new d3_Map(); + var i = -1, n = x.length, xi; + while (++i < n) if (!index.has(xi = x[i])) index.set(xi, domain.push(xi)); + return scale[ranger.t].apply(scale, ranger.a); + }; + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + rangeBand = 0; + ranger = { + t: "range", + a: arguments + }; + return scale; + }; + scale.rangePoints = function(x, padding) { + if (arguments.length < 2) padding = 0; + var start = x[0], stop = x[1], step = domain.length < 2 ? (start = (start + stop) / 2, + 0) : (stop - start) / (domain.length - 1 + padding); + range = steps(start + step * padding / 2, step); + rangeBand = 0; + ranger = { + t: "rangePoints", + a: arguments + }; + return scale; + }; + scale.rangeRoundPoints = function(x, padding) { + if (arguments.length < 2) padding = 0; + var start = x[0], stop = x[1], step = domain.length < 2 ? (start = stop = Math.round((start + stop) / 2), + 0) : (stop - start) / (domain.length - 1 + padding) | 0; + range = steps(start + Math.round(step * padding / 2 + (stop - start - (domain.length - 1 + padding) * step) / 2), step); + rangeBand = 0; + ranger = { + t: "rangeRoundPoints", + a: arguments + }; + return scale; + }; + scale.rangeBands = function(x, padding, outerPadding) { + if (arguments.length < 2) padding = 0; + if (arguments.length < 3) outerPadding = padding; + var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = (stop - start) / (domain.length - padding + 2 * outerPadding); + range = steps(start + step * outerPadding, step); + if (reverse) range.reverse(); + rangeBand = step * (1 - padding); + ranger = { + t: "rangeBands", + a: arguments + }; + return scale; + }; + scale.rangeRoundBands = function(x, padding, outerPadding) { + if (arguments.length < 2) padding = 0; + if (arguments.length < 3) outerPadding = padding; + var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = Math.floor((stop - start) / (domain.length - padding + 2 * outerPadding)); + range = steps(start + Math.round((stop - start - (domain.length - padding) * step) / 2), step); + if (reverse) range.reverse(); + rangeBand = Math.round(step * (1 - padding)); + ranger = { + t: "rangeRoundBands", + a: arguments + }; + return scale; + }; + scale.rangeBand = function() { + return rangeBand; + }; + scale.rangeExtent = function() { + return d3_scaleExtent(ranger.a[0]); + }; + scale.copy = function() { + return d3_scale_ordinal(domain, ranger); + }; + return scale.domain(domain); + } + d3.scale.category10 = function() { + return d3.scale.ordinal().range(d3_category10); + }; + d3.scale.category20 = function() { + return d3.scale.ordinal().range(d3_category20); + }; + d3.scale.category20b = function() { + return d3.scale.ordinal().range(d3_category20b); + }; + d3.scale.category20c = function() { + return d3.scale.ordinal().range(d3_category20c); + }; + var d3_category10 = [ 
2062260, 16744206, 2924588, 14034728, 9725885, 9197131, 14907330, 8355711, 12369186, 1556175 ].map(d3_rgbString); + var d3_category20 = [ 2062260, 11454440, 16744206, 16759672, 2924588, 10018698, 14034728, 16750742, 9725885, 12955861, 9197131, 12885140, 14907330, 16234194, 8355711, 13092807, 12369186, 14408589, 1556175, 10410725 ].map(d3_rgbString); + var d3_category20b = [ 3750777, 5395619, 7040719, 10264286, 6519097, 9216594, 11915115, 13556636, 9202993, 12426809, 15186514, 15190932, 8666169, 11356490, 14049643, 15177372, 8077683, 10834324, 13528509, 14589654 ].map(d3_rgbString); + var d3_category20c = [ 3244733, 7057110, 10406625, 13032431, 15095053, 16616764, 16625259, 16634018, 3253076, 7652470, 10607003, 13101504, 7695281, 10394312, 12369372, 14342891, 6513507, 9868950, 12434877, 14277081 ].map(d3_rgbString); + d3.scale.quantile = function() { + return d3_scale_quantile([], []); + }; + function d3_scale_quantile(domain, range) { + var thresholds; + function rescale() { + var k = 0, q = range.length; + thresholds = []; + while (++k < q) thresholds[k - 1] = d3.quantile(domain, k / q); + return scale; + } + function scale(x) { + if (!isNaN(x = +x)) return range[d3.bisect(thresholds, x)]; + } + scale.domain = function(x) { + if (!arguments.length) return domain; + domain = x.map(d3_number).filter(d3_numeric).sort(d3_ascending); + return rescale(); + }; + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + return rescale(); + }; + scale.quantiles = function() { + return thresholds; + }; + scale.invertExtent = function(y) { + y = range.indexOf(y); + return y < 0 ? [ NaN, NaN ] : [ y > 0 ? thresholds[y - 1] : domain[0], y < thresholds.length ? thresholds[y] : domain[domain.length - 1] ]; + }; + scale.copy = function() { + return d3_scale_quantile(domain, range); + }; + return rescale(); + } + d3.scale.quantize = function() { + return d3_scale_quantize(0, 1, [ 0, 1 ]); + }; + function d3_scale_quantize(x0, x1, range) { + var kx, i; + function scale(x) { + return range[Math.max(0, Math.min(i, Math.floor(kx * (x - x0))))]; + } + function rescale() { + kx = range.length / (x1 - x0); + i = range.length - 1; + return scale; + } + scale.domain = function(x) { + if (!arguments.length) return [ x0, x1 ]; + x0 = +x[0]; + x1 = +x[x.length - 1]; + return rescale(); + }; + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + return rescale(); + }; + scale.invertExtent = function(y) { + y = range.indexOf(y); + y = y < 0 ? 
NaN : y / kx + x0; + return [ y, y + 1 / kx ]; + }; + scale.copy = function() { + return d3_scale_quantize(x0, x1, range); + }; + return rescale(); + } + d3.scale.threshold = function() { + return d3_scale_threshold([ .5 ], [ 0, 1 ]); + }; + function d3_scale_threshold(domain, range) { + function scale(x) { + if (x <= x) return range[d3.bisect(domain, x)]; + } + scale.domain = function(_) { + if (!arguments.length) return domain; + domain = _; + return scale; + }; + scale.range = function(_) { + if (!arguments.length) return range; + range = _; + return scale; + }; + scale.invertExtent = function(y) { + y = range.indexOf(y); + return [ domain[y - 1], domain[y] ]; + }; + scale.copy = function() { + return d3_scale_threshold(domain, range); + }; + return scale; + } + d3.scale.identity = function() { + return d3_scale_identity([ 0, 1 ]); + }; + function d3_scale_identity(domain) { + function identity(x) { + return +x; + } + identity.invert = identity; + identity.domain = identity.range = function(x) { + if (!arguments.length) return domain; + domain = x.map(identity); + return identity; + }; + identity.ticks = function(m) { + return d3_scale_linearTicks(domain, m); + }; + identity.tickFormat = function(m, format) { + return d3_scale_linearTickFormat(domain, m, format); + }; + identity.copy = function() { + return d3_scale_identity(domain); + }; + return identity; + } + d3.svg = {}; + function d3_zero() { + return 0; + } + d3.svg.arc = function() { + var innerRadius = d3_svg_arcInnerRadius, outerRadius = d3_svg_arcOuterRadius, cornerRadius = d3_zero, padRadius = d3_svg_arcAuto, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle, padAngle = d3_svg_arcPadAngle; + function arc() { + var r0 = Math.max(0, +innerRadius.apply(this, arguments)), r1 = Math.max(0, +outerRadius.apply(this, arguments)), a0 = startAngle.apply(this, arguments) - halfπ, a1 = endAngle.apply(this, arguments) - halfπ, da = Math.abs(a1 - a0), cw = a0 > a1 ? 0 : 1; + if (r1 < r0) rc = r1, r1 = r0, r0 = rc; + if (da >= τε) return circleSegment(r1, cw) + (r0 ? circleSegment(r0, 1 - cw) : "") + "Z"; + var rc, cr, rp, ap, p0 = 0, p1 = 0, x0, y0, x1, y1, x2, y2, x3, y3, path = []; + if (ap = (+padAngle.apply(this, arguments) || 0) / 2) { + rp = padRadius === d3_svg_arcAuto ? Math.sqrt(r0 * r0 + r1 * r1) : +padRadius.apply(this, arguments); + if (!cw) p1 *= -1; + if (r1) p1 = d3_asin(rp / r1 * Math.sin(ap)); + if (r0) p0 = d3_asin(rp / r0 * Math.sin(ap)); + } + if (r1) { + x0 = r1 * Math.cos(a0 + p1); + y0 = r1 * Math.sin(a0 + p1); + x1 = r1 * Math.cos(a1 - p1); + y1 = r1 * Math.sin(a1 - p1); + var l1 = Math.abs(a1 - a0 - 2 * p1) <= π ? 0 : 1; + if (p1 && d3_svg_arcSweep(x0, y0, x1, y1) === cw ^ l1) { + var h1 = (a0 + a1) / 2; + x0 = r1 * Math.cos(h1); + y0 = r1 * Math.sin(h1); + x1 = y1 = null; + } + } else { + x0 = y0 = 0; + } + if (r0) { + x2 = r0 * Math.cos(a1 - p0); + y2 = r0 * Math.sin(a1 - p0); + x3 = r0 * Math.cos(a0 + p0); + y3 = r0 * Math.sin(a0 + p0); + var l0 = Math.abs(a0 - a1 + 2 * p0) <= π ? 0 : 1; + if (p0 && d3_svg_arcSweep(x2, y2, x3, y3) === 1 - cw ^ l0) { + var h0 = (a0 + a1) / 2; + x2 = r0 * Math.cos(h0); + y2 = r0 * Math.sin(h0); + x3 = y3 = null; + } + } else { + x2 = y2 = 0; + } + if ((rc = Math.min(Math.abs(r1 - r0) / 2, +cornerRadius.apply(this, arguments))) > .001) { + cr = r0 < r1 ^ cw ? 0 : 1; + var oc = x3 == null ? [ x2, y2 ] : x1 == null ? 
[ x0, y0 ] : d3_geom_polygonIntersect([ x0, y0 ], [ x3, y3 ], [ x1, y1 ], [ x2, y2 ]), ax = x0 - oc[0], ay = y0 - oc[1], bx = x1 - oc[0], by = y1 - oc[1], kc = 1 / Math.sin(Math.acos((ax * bx + ay * by) / (Math.sqrt(ax * ax + ay * ay) * Math.sqrt(bx * bx + by * by))) / 2), lc = Math.sqrt(oc[0] * oc[0] + oc[1] * oc[1]); + if (x1 != null) { + var rc1 = Math.min(rc, (r1 - lc) / (kc + 1)), t30 = d3_svg_arcCornerTangents(x3 == null ? [ x2, y2 ] : [ x3, y3 ], [ x0, y0 ], r1, rc1, cw), t12 = d3_svg_arcCornerTangents([ x1, y1 ], [ x2, y2 ], r1, rc1, cw); + if (rc === rc1) { + path.push("M", t30[0], "A", rc1, ",", rc1, " 0 0,", cr, " ", t30[1], "A", r1, ",", r1, " 0 ", 1 - cw ^ d3_svg_arcSweep(t30[1][0], t30[1][1], t12[1][0], t12[1][1]), ",", cw, " ", t12[1], "A", rc1, ",", rc1, " 0 0,", cr, " ", t12[0]); + } else { + path.push("M", t30[0], "A", rc1, ",", rc1, " 0 1,", cr, " ", t12[0]); + } + } else { + path.push("M", x0, ",", y0); + } + if (x3 != null) { + var rc0 = Math.min(rc, (r0 - lc) / (kc - 1)), t03 = d3_svg_arcCornerTangents([ x0, y0 ], [ x3, y3 ], r0, -rc0, cw), t21 = d3_svg_arcCornerTangents([ x2, y2 ], x1 == null ? [ x0, y0 ] : [ x1, y1 ], r0, -rc0, cw); + if (rc === rc0) { + path.push("L", t21[0], "A", rc0, ",", rc0, " 0 0,", cr, " ", t21[1], "A", r0, ",", r0, " 0 ", cw ^ d3_svg_arcSweep(t21[1][0], t21[1][1], t03[1][0], t03[1][1]), ",", 1 - cw, " ", t03[1], "A", rc0, ",", rc0, " 0 0,", cr, " ", t03[0]); + } else { + path.push("L", t21[0], "A", rc0, ",", rc0, " 0 0,", cr, " ", t03[0]); + } + } else { + path.push("L", x2, ",", y2); + } + } else { + path.push("M", x0, ",", y0); + if (x1 != null) path.push("A", r1, ",", r1, " 0 ", l1, ",", cw, " ", x1, ",", y1); + path.push("L", x2, ",", y2); + if (x3 != null) path.push("A", r0, ",", r0, " 0 ", l0, ",", 1 - cw, " ", x3, ",", y3); + } + path.push("Z"); + return path.join(""); + } + function circleSegment(r1, cw) { + return "M0," + r1 + "A" + r1 + "," + r1 + " 0 1," + cw + " 0," + -r1 + "A" + r1 + "," + r1 + " 0 1," + cw + " 0," + r1; + } + arc.innerRadius = function(v) { + if (!arguments.length) return innerRadius; + innerRadius = d3_functor(v); + return arc; + }; + arc.outerRadius = function(v) { + if (!arguments.length) return outerRadius; + outerRadius = d3_functor(v); + return arc; + }; + arc.cornerRadius = function(v) { + if (!arguments.length) return cornerRadius; + cornerRadius = d3_functor(v); + return arc; + }; + arc.padRadius = function(v) { + if (!arguments.length) return padRadius; + padRadius = v == d3_svg_arcAuto ? 
d3_svg_arcAuto : d3_functor(v); + return arc; + }; + arc.startAngle = function(v) { + if (!arguments.length) return startAngle; + startAngle = d3_functor(v); + return arc; + }; + arc.endAngle = function(v) { + if (!arguments.length) return endAngle; + endAngle = d3_functor(v); + return arc; + }; + arc.padAngle = function(v) { + if (!arguments.length) return padAngle; + padAngle = d3_functor(v); + return arc; + }; + arc.centroid = function() { + var r = (+innerRadius.apply(this, arguments) + +outerRadius.apply(this, arguments)) / 2, a = (+startAngle.apply(this, arguments) + +endAngle.apply(this, arguments)) / 2 - halfπ; + return [ Math.cos(a) * r, Math.sin(a) * r ]; + }; + return arc; + }; + var d3_svg_arcAuto = "auto"; + function d3_svg_arcInnerRadius(d) { + return d.innerRadius; + } + function d3_svg_arcOuterRadius(d) { + return d.outerRadius; + } + function d3_svg_arcStartAngle(d) { + return d.startAngle; + } + function d3_svg_arcEndAngle(d) { + return d.endAngle; + } + function d3_svg_arcPadAngle(d) { + return d && d.padAngle; + } + function d3_svg_arcSweep(x0, y0, x1, y1) { + return (x0 - x1) * y0 - (y0 - y1) * x0 > 0 ? 0 : 1; + } + function d3_svg_arcCornerTangents(p0, p1, r1, rc, cw) { + var x01 = p0[0] - p1[0], y01 = p0[1] - p1[1], lo = (cw ? rc : -rc) / Math.sqrt(x01 * x01 + y01 * y01), ox = lo * y01, oy = -lo * x01, x1 = p0[0] + ox, y1 = p0[1] + oy, x2 = p1[0] + ox, y2 = p1[1] + oy, x3 = (x1 + x2) / 2, y3 = (y1 + y2) / 2, dx = x2 - x1, dy = y2 - y1, d2 = dx * dx + dy * dy, r = r1 - rc, D = x1 * y2 - x2 * y1, d = (dy < 0 ? -1 : 1) * Math.sqrt(r * r * d2 - D * D), cx0 = (D * dy - dx * d) / d2, cy0 = (-D * dx - dy * d) / d2, cx1 = (D * dy + dx * d) / d2, cy1 = (-D * dx + dy * d) / d2, dx0 = cx0 - x3, dy0 = cy0 - y3, dx1 = cx1 - x3, dy1 = cy1 - y3; + if (dx0 * dx0 + dy0 * dy0 > dx1 * dx1 + dy1 * dy1) cx0 = cx1, cy0 = cy1; + return [ [ cx0 - ox, cy0 - oy ], [ cx0 * r1 / r, cy0 * r1 / r ] ]; + } + function d3_svg_line(projection) { + var x = d3_geom_pointX, y = d3_geom_pointY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, tension = .7; + function line(data) { + var segments = [], points = [], i = -1, n = data.length, d, fx = d3_functor(x), fy = d3_functor(y); + function segment() { + segments.push("M", interpolate(projection(points), tension)); + } + while (++i < n) { + if (defined.call(this, d = data[i], i)) { + points.push([ +fx.call(this, d, i), +fy.call(this, d, i) ]); + } else if (points.length) { + segment(); + points = []; + } + } + if (points.length) segment(); + return segments.length ? 
segments.join("") : null; + } + line.x = function(_) { + if (!arguments.length) return x; + x = _; + return line; + }; + line.y = function(_) { + if (!arguments.length) return y; + y = _; + return line; + }; + line.defined = function(_) { + if (!arguments.length) return defined; + defined = _; + return line; + }; + line.interpolate = function(_) { + if (!arguments.length) return interpolateKey; + if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key; + return line; + }; + line.tension = function(_) { + if (!arguments.length) return tension; + tension = _; + return line; + }; + return line; + } + d3.svg.line = function() { + return d3_svg_line(d3_identity); + }; + var d3_svg_lineInterpolators = d3.map({ + linear: d3_svg_lineLinear, + "linear-closed": d3_svg_lineLinearClosed, + step: d3_svg_lineStep, + "step-before": d3_svg_lineStepBefore, + "step-after": d3_svg_lineStepAfter, + basis: d3_svg_lineBasis, + "basis-open": d3_svg_lineBasisOpen, + "basis-closed": d3_svg_lineBasisClosed, + bundle: d3_svg_lineBundle, + cardinal: d3_svg_lineCardinal, + "cardinal-open": d3_svg_lineCardinalOpen, + "cardinal-closed": d3_svg_lineCardinalClosed, + monotone: d3_svg_lineMonotone + }); + d3_svg_lineInterpolators.forEach(function(key, value) { + value.key = key; + value.closed = /-closed$/.test(key); + }); + function d3_svg_lineLinear(points) { + return points.join("L"); + } + function d3_svg_lineLinearClosed(points) { + return d3_svg_lineLinear(points) + "Z"; + } + function d3_svg_lineStep(points) { + var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; + while (++i < n) path.push("H", (p[0] + (p = points[i])[0]) / 2, "V", p[1]); + if (n > 1) path.push("H", p[0]); + return path.join(""); + } + function d3_svg_lineStepBefore(points) { + var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; + while (++i < n) path.push("V", (p = points[i])[1], "H", p[0]); + return path.join(""); + } + function d3_svg_lineStepAfter(points) { + var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; + while (++i < n) path.push("H", (p = points[i])[0], "V", p[1]); + return path.join(""); + } + function d3_svg_lineCardinalOpen(points, tension) { + return points.length < 4 ? d3_svg_lineLinear(points) : points[1] + d3_svg_lineHermite(points.slice(1, -1), d3_svg_lineCardinalTangents(points, tension)); + } + function d3_svg_lineCardinalClosed(points, tension) { + return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite((points.push(points[0]), + points), d3_svg_lineCardinalTangents([ points[points.length - 2] ].concat(points, [ points[1] ]), tension)); + } + function d3_svg_lineCardinal(points, tension) { + return points.length < 3 ? 
d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineCardinalTangents(points, tension)); + } + function d3_svg_lineHermite(points, tangents) { + if (tangents.length < 1 || points.length != tangents.length && points.length != tangents.length + 2) { + return d3_svg_lineLinear(points); + } + var quad = points.length != tangents.length, path = "", p0 = points[0], p = points[1], t0 = tangents[0], t = t0, pi = 1; + if (quad) { + path += "Q" + (p[0] - t0[0] * 2 / 3) + "," + (p[1] - t0[1] * 2 / 3) + "," + p[0] + "," + p[1]; + p0 = points[1]; + pi = 2; + } + if (tangents.length > 1) { + t = tangents[1]; + p = points[pi]; + pi++; + path += "C" + (p0[0] + t0[0]) + "," + (p0[1] + t0[1]) + "," + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1]; + for (var i = 2; i < tangents.length; i++, pi++) { + p = points[pi]; + t = tangents[i]; + path += "S" + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1]; + } + } + if (quad) { + var lp = points[pi]; + path += "Q" + (p[0] + t[0] * 2 / 3) + "," + (p[1] + t[1] * 2 / 3) + "," + lp[0] + "," + lp[1]; + } + return path; + } + function d3_svg_lineCardinalTangents(points, tension) { + var tangents = [], a = (1 - tension) / 2, p0, p1 = points[0], p2 = points[1], i = 1, n = points.length; + while (++i < n) { + p0 = p1; + p1 = p2; + p2 = points[i]; + tangents.push([ a * (p2[0] - p0[0]), a * (p2[1] - p0[1]) ]); + } + return tangents; + } + function d3_svg_lineBasis(points) { + if (points.length < 3) return d3_svg_lineLinear(points); + var i = 1, n = points.length, pi = points[0], x0 = pi[0], y0 = pi[1], px = [ x0, x0, x0, (pi = points[1])[0] ], py = [ y0, y0, y0, pi[1] ], path = [ x0, ",", y0, "L", d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ]; + points.push(points[n - 1]); + while (++i <= n) { + pi = points[i]; + px.shift(); + px.push(pi[0]); + py.shift(); + py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + points.pop(); + path.push("L", pi); + return path.join(""); + } + function d3_svg_lineBasisOpen(points) { + if (points.length < 4) return d3_svg_lineLinear(points); + var path = [], i = -1, n = points.length, pi, px = [ 0 ], py = [ 0 ]; + while (++i < 3) { + pi = points[i]; + px.push(pi[0]); + py.push(pi[1]); + } + path.push(d3_svg_lineDot4(d3_svg_lineBasisBezier3, px) + "," + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py)); + --i; + while (++i < n) { + pi = points[i]; + px.shift(); + px.push(pi[0]); + py.shift(); + py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + return path.join(""); + } + function d3_svg_lineBasisClosed(points) { + var path, i = -1, n = points.length, m = n + 4, pi, px = [], py = []; + while (++i < 4) { + pi = points[i % n]; + px.push(pi[0]); + py.push(pi[1]); + } + path = [ d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ]; + --i; + while (++i < m) { + pi = points[i % n]; + px.shift(); + px.push(pi[0]); + py.shift(); + py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + return path.join(""); + } + function d3_svg_lineBundle(points, tension) { + var n = points.length - 1; + if (n) { + var x0 = points[0][0], y0 = points[0][1], dx = points[n][0] - x0, dy = points[n][1] - y0, i = -1, p, t; + while (++i <= n) { + p = points[i]; + t = i / n; + p[0] = tension * p[0] + (1 - tension) * (x0 + t * dx); + p[1] = tension * p[1] + (1 - tension) * (y0 + t * dy); + } + } + return d3_svg_lineBasis(points); + } + function d3_svg_lineDot4(a, b) { + return a[0] * b[0] + a[1] * b[1] + a[2] 
* b[2] + a[3] * b[3]; + } + var d3_svg_lineBasisBezier1 = [ 0, 2 / 3, 1 / 3, 0 ], d3_svg_lineBasisBezier2 = [ 0, 1 / 3, 2 / 3, 0 ], d3_svg_lineBasisBezier3 = [ 0, 1 / 6, 2 / 3, 1 / 6 ]; + function d3_svg_lineBasisBezier(path, x, y) { + path.push("C", d3_svg_lineDot4(d3_svg_lineBasisBezier1, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier1, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, y)); + } + function d3_svg_lineSlope(p0, p1) { + return (p1[1] - p0[1]) / (p1[0] - p0[0]); + } + function d3_svg_lineFiniteDifferences(points) { + var i = 0, j = points.length - 1, m = [], p0 = points[0], p1 = points[1], d = m[0] = d3_svg_lineSlope(p0, p1); + while (++i < j) { + m[i] = (d + (d = d3_svg_lineSlope(p0 = p1, p1 = points[i + 1]))) / 2; + } + m[i] = d; + return m; + } + function d3_svg_lineMonotoneTangents(points) { + var tangents = [], d, a, b, s, m = d3_svg_lineFiniteDifferences(points), i = -1, j = points.length - 1; + while (++i < j) { + d = d3_svg_lineSlope(points[i], points[i + 1]); + if (abs(d) < ε) { + m[i] = m[i + 1] = 0; + } else { + a = m[i] / d; + b = m[i + 1] / d; + s = a * a + b * b; + if (s > 9) { + s = d * 3 / Math.sqrt(s); + m[i] = s * a; + m[i + 1] = s * b; + } + } + } + i = -1; + while (++i <= j) { + s = (points[Math.min(j, i + 1)][0] - points[Math.max(0, i - 1)][0]) / (6 * (1 + m[i] * m[i])); + tangents.push([ s || 0, m[i] * s || 0 ]); + } + return tangents; + } + function d3_svg_lineMonotone(points) { + return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineMonotoneTangents(points)); + } + d3.svg.line.radial = function() { + var line = d3_svg_line(d3_svg_lineRadial); + line.radius = line.x, delete line.x; + line.angle = line.y, delete line.y; + return line; + }; + function d3_svg_lineRadial(points) { + var point, i = -1, n = points.length, r, a; + while (++i < n) { + point = points[i]; + r = point[0]; + a = point[1] - halfπ; + point[0] = r * Math.cos(a); + point[1] = r * Math.sin(a); + } + return points; + } + function d3_svg_area(projection) { + var x0 = d3_geom_pointX, x1 = d3_geom_pointX, y0 = 0, y1 = d3_geom_pointY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, interpolateReverse = interpolate, L = "L", tension = .7; + function area(data) { + var segments = [], points0 = [], points1 = [], i = -1, n = data.length, d, fx0 = d3_functor(x0), fy0 = d3_functor(y0), fx1 = x0 === x1 ? function() { + return x; + } : d3_functor(x1), fy1 = y0 === y1 ? function() { + return y; + } : d3_functor(y1), x, y; + function segment() { + segments.push("M", interpolate(projection(points1), tension), L, interpolateReverse(projection(points0.reverse()), tension), "Z"); + } + while (++i < n) { + if (defined.call(this, d = data[i], i)) { + points0.push([ x = +fx0.call(this, d, i), y = +fy0.call(this, d, i) ]); + points1.push([ +fx1.call(this, d, i), +fy1.call(this, d, i) ]); + } else if (points0.length) { + segment(); + points0 = []; + points1 = []; + } + } + if (points0.length) segment(); + return segments.length ? 
segments.join("") : null; + } + area.x = function(_) { + if (!arguments.length) return x1; + x0 = x1 = _; + return area; + }; + area.x0 = function(_) { + if (!arguments.length) return x0; + x0 = _; + return area; + }; + area.x1 = function(_) { + if (!arguments.length) return x1; + x1 = _; + return area; + }; + area.y = function(_) { + if (!arguments.length) return y1; + y0 = y1 = _; + return area; + }; + area.y0 = function(_) { + if (!arguments.length) return y0; + y0 = _; + return area; + }; + area.y1 = function(_) { + if (!arguments.length) return y1; + y1 = _; + return area; + }; + area.defined = function(_) { + if (!arguments.length) return defined; + defined = _; + return area; + }; + area.interpolate = function(_) { + if (!arguments.length) return interpolateKey; + if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key; + interpolateReverse = interpolate.reverse || interpolate; + L = interpolate.closed ? "M" : "L"; + return area; + }; + area.tension = function(_) { + if (!arguments.length) return tension; + tension = _; + return area; + }; + return area; + } + d3_svg_lineStepBefore.reverse = d3_svg_lineStepAfter; + d3_svg_lineStepAfter.reverse = d3_svg_lineStepBefore; + d3.svg.area = function() { + return d3_svg_area(d3_identity); + }; + d3.svg.area.radial = function() { + var area = d3_svg_area(d3_svg_lineRadial); + area.radius = area.x, delete area.x; + area.innerRadius = area.x0, delete area.x0; + area.outerRadius = area.x1, delete area.x1; + area.angle = area.y, delete area.y; + area.startAngle = area.y0, delete area.y0; + area.endAngle = area.y1, delete area.y1; + return area; + }; + d3.svg.chord = function() { + var source = d3_source, target = d3_target, radius = d3_svg_chordRadius, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle; + function chord(d, i) { + var s = subgroup(this, source, d, i), t = subgroup(this, target, d, i); + return "M" + s.p0 + arc(s.r, s.p1, s.a1 - s.a0) + (equals(s, t) ? 
curve(s.r, s.p1, s.r, s.p0) : curve(s.r, s.p1, t.r, t.p0) + arc(t.r, t.p1, t.a1 - t.a0) + curve(t.r, t.p1, s.r, s.p0)) + "Z"; + } + function subgroup(self, f, d, i) { + var subgroup = f.call(self, d, i), r = radius.call(self, subgroup, i), a0 = startAngle.call(self, subgroup, i) - halfπ, a1 = endAngle.call(self, subgroup, i) - halfπ; + return { + r: r, + a0: a0, + a1: a1, + p0: [ r * Math.cos(a0), r * Math.sin(a0) ], + p1: [ r * Math.cos(a1), r * Math.sin(a1) ] + }; + } + function equals(a, b) { + return a.a0 == b.a0 && a.a1 == b.a1; + } + function arc(r, p, a) { + return "A" + r + "," + r + " 0 " + +(a > π) + ",1 " + p; + } + function curve(r0, p0, r1, p1) { + return "Q 0,0 " + p1; + } + chord.radius = function(v) { + if (!arguments.length) return radius; + radius = d3_functor(v); + return chord; + }; + chord.source = function(v) { + if (!arguments.length) return source; + source = d3_functor(v); + return chord; + }; + chord.target = function(v) { + if (!arguments.length) return target; + target = d3_functor(v); + return chord; + }; + chord.startAngle = function(v) { + if (!arguments.length) return startAngle; + startAngle = d3_functor(v); + return chord; + }; + chord.endAngle = function(v) { + if (!arguments.length) return endAngle; + endAngle = d3_functor(v); + return chord; + }; + return chord; + }; + function d3_svg_chordRadius(d) { + return d.radius; + } + d3.svg.diagonal = function() { + var source = d3_source, target = d3_target, projection = d3_svg_diagonalProjection; + function diagonal(d, i) { + var p0 = source.call(this, d, i), p3 = target.call(this, d, i), m = (p0.y + p3.y) / 2, p = [ p0, { + x: p0.x, + y: m + }, { + x: p3.x, + y: m + }, p3 ]; + p = p.map(projection); + return "M" + p[0] + "C" + p[1] + " " + p[2] + " " + p[3]; + } + diagonal.source = function(x) { + if (!arguments.length) return source; + source = d3_functor(x); + return diagonal; + }; + diagonal.target = function(x) { + if (!arguments.length) return target; + target = d3_functor(x); + return diagonal; + }; + diagonal.projection = function(x) { + if (!arguments.length) return projection; + projection = x; + return diagonal; + }; + return diagonal; + }; + function d3_svg_diagonalProjection(d) { + return [ d.x, d.y ]; + } + d3.svg.diagonal.radial = function() { + var diagonal = d3.svg.diagonal(), projection = d3_svg_diagonalProjection, projection_ = diagonal.projection; + diagonal.projection = function(x) { + return arguments.length ? 
projection_(d3_svg_diagonalRadialProjection(projection = x)) : projection; + }; + return diagonal; + }; + function d3_svg_diagonalRadialProjection(projection) { + return function() { + var d = projection.apply(this, arguments), r = d[0], a = d[1] - halfπ; + return [ r * Math.cos(a), r * Math.sin(a) ]; + }; + } + d3.svg.symbol = function() { + var type = d3_svg_symbolType, size = d3_svg_symbolSize; + function symbol(d, i) { + return (d3_svg_symbols.get(type.call(this, d, i)) || d3_svg_symbolCircle)(size.call(this, d, i)); + } + symbol.type = function(x) { + if (!arguments.length) return type; + type = d3_functor(x); + return symbol; + }; + symbol.size = function(x) { + if (!arguments.length) return size; + size = d3_functor(x); + return symbol; + }; + return symbol; + }; + function d3_svg_symbolSize() { + return 64; + } + function d3_svg_symbolType() { + return "circle"; + } + function d3_svg_symbolCircle(size) { + var r = Math.sqrt(size / π); + return "M0," + r + "A" + r + "," + r + " 0 1,1 0," + -r + "A" + r + "," + r + " 0 1,1 0," + r + "Z"; + } + var d3_svg_symbols = d3.map({ + circle: d3_svg_symbolCircle, + cross: function(size) { + var r = Math.sqrt(size / 5) / 2; + return "M" + -3 * r + "," + -r + "H" + -r + "V" + -3 * r + "H" + r + "V" + -r + "H" + 3 * r + "V" + r + "H" + r + "V" + 3 * r + "H" + -r + "V" + r + "H" + -3 * r + "Z"; + }, + diamond: function(size) { + var ry = Math.sqrt(size / (2 * d3_svg_symbolTan30)), rx = ry * d3_svg_symbolTan30; + return "M0," + -ry + "L" + rx + ",0" + " 0," + ry + " " + -rx + ",0" + "Z"; + }, + square: function(size) { + var r = Math.sqrt(size) / 2; + return "M" + -r + "," + -r + "L" + r + "," + -r + " " + r + "," + r + " " + -r + "," + r + "Z"; + }, + "triangle-down": function(size) { + var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2; + return "M0," + ry + "L" + rx + "," + -ry + " " + -rx + "," + -ry + "Z"; + }, + "triangle-up": function(size) { + var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2; + return "M0," + -ry + "L" + rx + "," + ry + " " + -rx + "," + ry + "Z"; + } + }); + d3.svg.symbolTypes = d3_svg_symbols.keys(); + var d3_svg_symbolSqrt3 = Math.sqrt(3), d3_svg_symbolTan30 = Math.tan(30 * d3_radians); + d3_selectionPrototype.transition = function(name) { + var id = d3_transitionInheritId || ++d3_transitionId, ns = d3_transitionNamespace(name), subgroups = [], subgroup, node, transition = d3_transitionInherit || { + time: Date.now(), + ease: d3_ease_cubicInOut, + delay: 0, + duration: 250 + }; + for (var j = -1, m = this.length; ++j < m; ) { + subgroups.push(subgroup = []); + for (var group = this[j], i = -1, n = group.length; ++i < n; ) { + if (node = group[i]) d3_transitionNode(node, i, ns, id, transition); + subgroup.push(node); + } + } + return d3_transition(subgroups, ns, id); + }; + d3_selectionPrototype.interrupt = function(name) { + return this.each(name == null ? 
d3_selection_interrupt : d3_selection_interruptNS(d3_transitionNamespace(name))); + }; + var d3_selection_interrupt = d3_selection_interruptNS(d3_transitionNamespace()); + function d3_selection_interruptNS(ns) { + return function() { + var lock, active; + if ((lock = this[ns]) && (active = lock[lock.active])) { + if (--lock.count) delete lock[lock.active]; else delete this[ns]; + lock.active += .5; + active.event && active.event.interrupt.call(this, this.__data__, active.index); + } + }; + } + function d3_transition(groups, ns, id) { + d3_subclass(groups, d3_transitionPrototype); + groups.namespace = ns; + groups.id = id; + return groups; + } + var d3_transitionPrototype = [], d3_transitionId = 0, d3_transitionInheritId, d3_transitionInherit; + d3_transitionPrototype.call = d3_selectionPrototype.call; + d3_transitionPrototype.empty = d3_selectionPrototype.empty; + d3_transitionPrototype.node = d3_selectionPrototype.node; + d3_transitionPrototype.size = d3_selectionPrototype.size; + d3.transition = function(selection, name) { + return selection && selection.transition ? d3_transitionInheritId ? selection.transition(name) : selection : d3.selection().transition(selection); + }; + d3.transition.prototype = d3_transitionPrototype; + d3_transitionPrototype.select = function(selector) { + var id = this.id, ns = this.namespace, subgroups = [], subgroup, subnode, node; + selector = d3_selection_selector(selector); + for (var j = -1, m = this.length; ++j < m; ) { + subgroups.push(subgroup = []); + for (var group = this[j], i = -1, n = group.length; ++i < n; ) { + if ((node = group[i]) && (subnode = selector.call(node, node.__data__, i, j))) { + if ("__data__" in node) subnode.__data__ = node.__data__; + d3_transitionNode(subnode, i, ns, id, node[ns][id]); + subgroup.push(subnode); + } else { + subgroup.push(null); + } + } + } + return d3_transition(subgroups, ns, id); + }; + d3_transitionPrototype.selectAll = function(selector) { + var id = this.id, ns = this.namespace, subgroups = [], subgroup, subnodes, node, subnode, transition; + selector = d3_selection_selectorAll(selector); + for (var j = -1, m = this.length; ++j < m; ) { + for (var group = this[j], i = -1, n = group.length; ++i < n; ) { + if (node = group[i]) { + transition = node[ns][id]; + subnodes = selector.call(node, node.__data__, i, j); + subgroups.push(subgroup = []); + for (var k = -1, o = subnodes.length; ++k < o; ) { + if (subnode = subnodes[k]) d3_transitionNode(subnode, k, ns, id, transition); + subgroup.push(subnode); + } + } + } + } + return d3_transition(subgroups, ns, id); + }; + d3_transitionPrototype.filter = function(filter) { + var subgroups = [], subgroup, group, node; + if (typeof filter !== "function") filter = d3_selection_filter(filter); + for (var j = 0, m = this.length; j < m; j++) { + subgroups.push(subgroup = []); + for (var group = this[j], i = 0, n = group.length; i < n; i++) { + if ((node = group[i]) && filter.call(node, node.__data__, i, j)) { + subgroup.push(node); + } + } + } + return d3_transition(subgroups, this.namespace, this.id); + }; + d3_transitionPrototype.tween = function(name, tween) { + var id = this.id, ns = this.namespace; + if (arguments.length < 2) return this.node()[ns][id].tween.get(name); + return d3_selection_each(this, tween == null ? 
function(node) { + node[ns][id].tween.remove(name); + } : function(node) { + node[ns][id].tween.set(name, tween); + }); + }; + function d3_transition_tween(groups, name, value, tween) { + var id = groups.id, ns = groups.namespace; + return d3_selection_each(groups, typeof value === "function" ? function(node, i, j) { + node[ns][id].tween.set(name, tween(value.call(node, node.__data__, i, j))); + } : (value = tween(value), function(node) { + node[ns][id].tween.set(name, value); + })); + } + d3_transitionPrototype.attr = function(nameNS, value) { + if (arguments.length < 2) { + for (value in nameNS) this.attr(value, nameNS[value]); + return this; + } + var interpolate = nameNS == "transform" ? d3_interpolateTransform : d3_interpolate, name = d3.ns.qualify(nameNS); + function attrNull() { + this.removeAttribute(name); + } + function attrNullNS() { + this.removeAttributeNS(name.space, name.local); + } + function attrTween(b) { + return b == null ? attrNull : (b += "", function() { + var a = this.getAttribute(name), i; + return a !== b && (i = interpolate(a, b), function(t) { + this.setAttribute(name, i(t)); + }); + }); + } + function attrTweenNS(b) { + return b == null ? attrNullNS : (b += "", function() { + var a = this.getAttributeNS(name.space, name.local), i; + return a !== b && (i = interpolate(a, b), function(t) { + this.setAttributeNS(name.space, name.local, i(t)); + }); + }); + } + return d3_transition_tween(this, "attr." + nameNS, value, name.local ? attrTweenNS : attrTween); + }; + d3_transitionPrototype.attrTween = function(nameNS, tween) { + var name = d3.ns.qualify(nameNS); + function attrTween(d, i) { + var f = tween.call(this, d, i, this.getAttribute(name)); + return f && function(t) { + this.setAttribute(name, f(t)); + }; + } + function attrTweenNS(d, i) { + var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local)); + return f && function(t) { + this.setAttributeNS(name.space, name.local, f(t)); + }; + } + return this.tween("attr." + nameNS, name.local ? attrTweenNS : attrTween); + }; + d3_transitionPrototype.style = function(name, value, priority) { + var n = arguments.length; + if (n < 3) { + if (typeof name !== "string") { + if (n < 2) value = ""; + for (priority in name) this.style(priority, name[priority], value); + return this; + } + priority = ""; + } + function styleNull() { + this.style.removeProperty(name); + } + function styleString(b) { + return b == null ? styleNull : (b += "", function() { + var a = d3_window(this).getComputedStyle(this, null).getPropertyValue(name), i; + return a !== b && (i = d3_interpolate(a, b), function(t) { + this.style.setProperty(name, i(t), priority); + }); + }); + } + return d3_transition_tween(this, "style." + name, value, styleString); + }; + d3_transitionPrototype.styleTween = function(name, tween, priority) { + if (arguments.length < 3) priority = ""; + function styleTween(d, i) { + var f = tween.call(this, d, i, d3_window(this).getComputedStyle(this, null).getPropertyValue(name)); + return f && function(t) { + this.style.setProperty(name, f(t), priority); + }; + } + return this.tween("style." 
+ name, styleTween); + }; + d3_transitionPrototype.text = function(value) { + return d3_transition_tween(this, "text", value, d3_transition_text); + }; + function d3_transition_text(b) { + if (b == null) b = ""; + return function() { + this.textContent = b; + }; + } + d3_transitionPrototype.remove = function() { + var ns = this.namespace; + return this.each("end.transition", function() { + var p; + if (this[ns].count < 2 && (p = this.parentNode)) p.removeChild(this); + }); + }; + d3_transitionPrototype.ease = function(value) { + var id = this.id, ns = this.namespace; + if (arguments.length < 1) return this.node()[ns][id].ease; + if (typeof value !== "function") value = d3.ease.apply(d3, arguments); + return d3_selection_each(this, function(node) { + node[ns][id].ease = value; + }); + }; + d3_transitionPrototype.delay = function(value) { + var id = this.id, ns = this.namespace; + if (arguments.length < 1) return this.node()[ns][id].delay; + return d3_selection_each(this, typeof value === "function" ? function(node, i, j) { + node[ns][id].delay = +value.call(node, node.__data__, i, j); + } : (value = +value, function(node) { + node[ns][id].delay = value; + })); + }; + d3_transitionPrototype.duration = function(value) { + var id = this.id, ns = this.namespace; + if (arguments.length < 1) return this.node()[ns][id].duration; + return d3_selection_each(this, typeof value === "function" ? function(node, i, j) { + node[ns][id].duration = Math.max(1, value.call(node, node.__data__, i, j)); + } : (value = Math.max(1, value), function(node) { + node[ns][id].duration = value; + })); + }; + d3_transitionPrototype.each = function(type, listener) { + var id = this.id, ns = this.namespace; + if (arguments.length < 2) { + var inherit = d3_transitionInherit, inheritId = d3_transitionInheritId; + try { + d3_transitionInheritId = id; + d3_selection_each(this, function(node, i, j) { + d3_transitionInherit = node[ns][id]; + type.call(node, node.__data__, i, j); + }); + } finally { + d3_transitionInherit = inherit; + d3_transitionInheritId = inheritId; + } + } else { + d3_selection_each(this, function(node) { + var transition = node[ns][id]; + (transition.event || (transition.event = d3.dispatch("start", "end", "interrupt"))).on(type, listener); + }); + } + return this; + }; + d3_transitionPrototype.transition = function() { + var id0 = this.id, id1 = ++d3_transitionId, ns = this.namespace, subgroups = [], subgroup, group, node, transition; + for (var j = 0, m = this.length; j < m; j++) { + subgroups.push(subgroup = []); + for (var group = this[j], i = 0, n = group.length; i < n; i++) { + if (node = group[i]) { + transition = node[ns][id0]; + d3_transitionNode(node, i, ns, id1, { + time: transition.time, + ease: transition.ease, + delay: transition.delay + transition.duration, + duration: transition.duration + }); + } + subgroup.push(node); + } + } + return d3_transition(subgroups, ns, id1); + }; + function d3_transitionNamespace(name) { + return name == null ? 
"__transition__" : "__transition_" + name + "__"; + } + function d3_transitionNode(node, i, ns, id, inherit) { + var lock = node[ns] || (node[ns] = { + active: 0, + count: 0 + }), transition = lock[id]; + if (!transition) { + var time = inherit.time; + transition = lock[id] = { + tween: new d3_Map(), + time: time, + delay: inherit.delay, + duration: inherit.duration, + ease: inherit.ease, + index: i + }; + inherit = null; + ++lock.count; + d3.timer(function(elapsed) { + var delay = transition.delay, duration, ease, timer = d3_timer_active, tweened = []; + timer.t = delay + time; + if (delay <= elapsed) return start(elapsed - delay); + timer.c = start; + function start(elapsed) { + if (lock.active > id) return stop(); + var active = lock[lock.active]; + if (active) { + --lock.count; + delete lock[lock.active]; + active.event && active.event.interrupt.call(node, node.__data__, active.index); + } + lock.active = id; + transition.event && transition.event.start.call(node, node.__data__, i); + transition.tween.forEach(function(key, value) { + if (value = value.call(node, node.__data__, i)) { + tweened.push(value); + } + }); + ease = transition.ease; + duration = transition.duration; + d3.timer(function() { + timer.c = tick(elapsed || 1) ? d3_true : tick; + return 1; + }, 0, time); + } + function tick(elapsed) { + if (lock.active !== id) return 1; + var t = elapsed / duration, e = ease(t), n = tweened.length; + while (n > 0) { + tweened[--n].call(node, e); + } + if (t >= 1) { + transition.event && transition.event.end.call(node, node.__data__, i); + return stop(); + } + } + function stop() { + if (--lock.count) delete lock[id]; else delete node[ns]; + return 1; + } + }, 0, time); + } + } + d3.svg.axis = function() { + var scale = d3.scale.linear(), orient = d3_svg_axisDefaultOrient, innerTickSize = 6, outerTickSize = 6, tickPadding = 3, tickArguments_ = [ 10 ], tickValues = null, tickFormat_; + function axis(g) { + g.each(function() { + var g = d3.select(this); + var scale0 = this.__chart__ || scale, scale1 = this.__chart__ = scale.copy(); + var ticks = tickValues == null ? scale1.ticks ? scale1.ticks.apply(scale1, tickArguments_) : scale1.domain() : tickValues, tickFormat = tickFormat_ == null ? scale1.tickFormat ? scale1.tickFormat.apply(scale1, tickArguments_) : d3_identity : tickFormat_, tick = g.selectAll(".tick").data(ticks, scale1), tickEnter = tick.enter().insert("g", ".domain").attr("class", "tick").style("opacity", ε), tickExit = d3.transition(tick.exit()).style("opacity", ε).remove(), tickUpdate = d3.transition(tick.order()).style("opacity", 1), tickSpacing = Math.max(innerTickSize, 0) + tickPadding, tickTransform; + var range = d3_scaleRange(scale1), path = g.selectAll(".domain").data([ 0 ]), pathUpdate = (path.enter().append("path").attr("class", "domain"), + d3.transition(path)); + tickEnter.append("line"); + tickEnter.append("text"); + var lineEnter = tickEnter.select("line"), lineUpdate = tickUpdate.select("line"), text = tick.select("text").text(tickFormat), textEnter = tickEnter.select("text"), textUpdate = tickUpdate.select("text"), sign = orient === "top" || orient === "left" ? -1 : 1, x1, x2, y1, y2; + if (orient === "bottom" || orient === "top") { + tickTransform = d3_svg_axisX, x1 = "x", y1 = "y", x2 = "x2", y2 = "y2"; + text.attr("dy", sign < 0 ? 
"0em" : ".71em").style("text-anchor", "middle"); + pathUpdate.attr("d", "M" + range[0] + "," + sign * outerTickSize + "V0H" + range[1] + "V" + sign * outerTickSize); + } else { + tickTransform = d3_svg_axisY, x1 = "y", y1 = "x", x2 = "y2", y2 = "x2"; + text.attr("dy", ".32em").style("text-anchor", sign < 0 ? "end" : "start"); + pathUpdate.attr("d", "M" + sign * outerTickSize + "," + range[0] + "H0V" + range[1] + "H" + sign * outerTickSize); + } + lineEnter.attr(y2, sign * innerTickSize); + textEnter.attr(y1, sign * tickSpacing); + lineUpdate.attr(x2, 0).attr(y2, sign * innerTickSize); + textUpdate.attr(x1, 0).attr(y1, sign * tickSpacing); + if (scale1.rangeBand) { + var x = scale1, dx = x.rangeBand() / 2; + scale0 = scale1 = function(d) { + return x(d) + dx; + }; + } else if (scale0.rangeBand) { + scale0 = scale1; + } else { + tickExit.call(tickTransform, scale1, scale0); + } + tickEnter.call(tickTransform, scale0, scale1); + tickUpdate.call(tickTransform, scale1, scale1); + }); + } + axis.scale = function(x) { + if (!arguments.length) return scale; + scale = x; + return axis; + }; + axis.orient = function(x) { + if (!arguments.length) return orient; + orient = x in d3_svg_axisOrients ? x + "" : d3_svg_axisDefaultOrient; + return axis; + }; + axis.ticks = function() { + if (!arguments.length) return tickArguments_; + tickArguments_ = arguments; + return axis; + }; + axis.tickValues = function(x) { + if (!arguments.length) return tickValues; + tickValues = x; + return axis; + }; + axis.tickFormat = function(x) { + if (!arguments.length) return tickFormat_; + tickFormat_ = x; + return axis; + }; + axis.tickSize = function(x) { + var n = arguments.length; + if (!n) return innerTickSize; + innerTickSize = +x; + outerTickSize = +arguments[n - 1]; + return axis; + }; + axis.innerTickSize = function(x) { + if (!arguments.length) return innerTickSize; + innerTickSize = +x; + return axis; + }; + axis.outerTickSize = function(x) { + if (!arguments.length) return outerTickSize; + outerTickSize = +x; + return axis; + }; + axis.tickPadding = function(x) { + if (!arguments.length) return tickPadding; + tickPadding = +x; + return axis; + }; + axis.tickSubdivide = function() { + return arguments.length && axis; + }; + return axis; + }; + var d3_svg_axisDefaultOrient = "bottom", d3_svg_axisOrients = { + top: 1, + right: 1, + bottom: 1, + left: 1 + }; + function d3_svg_axisX(selection, x0, x1) { + selection.attr("transform", function(d) { + var v0 = x0(d); + return "translate(" + (isFinite(v0) ? v0 : x1(d)) + ",0)"; + }); + } + function d3_svg_axisY(selection, y0, y1) { + selection.attr("transform", function(d) { + var v0 = y0(d); + return "translate(0," + (isFinite(v0) ? 
v0 : y1(d)) + ")"; + }); + } + d3.svg.brush = function() { + var event = d3_eventDispatch(brush, "brushstart", "brush", "brushend"), x = null, y = null, xExtent = [ 0, 0 ], yExtent = [ 0, 0 ], xExtentDomain, yExtentDomain, xClamp = true, yClamp = true, resizes = d3_svg_brushResizes[0]; + function brush(g) { + g.each(function() { + var g = d3.select(this).style("pointer-events", "all").style("-webkit-tap-highlight-color", "rgba(0,0,0,0)").on("mousedown.brush", brushstart).on("touchstart.brush", brushstart); + var background = g.selectAll(".background").data([ 0 ]); + background.enter().append("rect").attr("class", "background").style("visibility", "hidden").style("cursor", "crosshair"); + g.selectAll(".extent").data([ 0 ]).enter().append("rect").attr("class", "extent").style("cursor", "move"); + var resize = g.selectAll(".resize").data(resizes, d3_identity); + resize.exit().remove(); + resize.enter().append("g").attr("class", function(d) { + return "resize " + d; + }).style("cursor", function(d) { + return d3_svg_brushCursor[d]; + }).append("rect").attr("x", function(d) { + return /[ew]$/.test(d) ? -3 : null; + }).attr("y", function(d) { + return /^[ns]/.test(d) ? -3 : null; + }).attr("width", 6).attr("height", 6).style("visibility", "hidden"); + resize.style("display", brush.empty() ? "none" : null); + var gUpdate = d3.transition(g), backgroundUpdate = d3.transition(background), range; + if (x) { + range = d3_scaleRange(x); + backgroundUpdate.attr("x", range[0]).attr("width", range[1] - range[0]); + redrawX(gUpdate); + } + if (y) { + range = d3_scaleRange(y); + backgroundUpdate.attr("y", range[0]).attr("height", range[1] - range[0]); + redrawY(gUpdate); + } + redraw(gUpdate); + }); + } + brush.event = function(g) { + g.each(function() { + var event_ = event.of(this, arguments), extent1 = { + x: xExtent, + y: yExtent, + i: xExtentDomain, + j: yExtentDomain + }, extent0 = this.__chart__ || extent1; + this.__chart__ = extent1; + if (d3_transitionInheritId) { + d3.select(this).transition().each("start.brush", function() { + xExtentDomain = extent0.i; + yExtentDomain = extent0.j; + xExtent = extent0.x; + yExtent = extent0.y; + event_({ + type: "brushstart" + }); + }).tween("brush:brush", function() { + var xi = d3_interpolateArray(xExtent, extent1.x), yi = d3_interpolateArray(yExtent, extent1.y); + xExtentDomain = yExtentDomain = null; + return function(t) { + xExtent = extent1.x = xi(t); + yExtent = extent1.y = yi(t); + event_({ + type: "brush", + mode: "resize" + }); + }; + }).each("end.brush", function() { + xExtentDomain = extent1.i; + yExtentDomain = extent1.j; + event_({ + type: "brush", + mode: "resize" + }); + event_({ + type: "brushend" + }); + }); + } else { + event_({ + type: "brushstart" + }); + event_({ + type: "brush", + mode: "resize" + }); + event_({ + type: "brushend" + }); + } + }); + }; + function redraw(g) { + g.selectAll(".resize").attr("transform", function(d) { + return "translate(" + xExtent[+/e$/.test(d)] + "," + yExtent[+/^s/.test(d)] + ")"; + }); + } + function redrawX(g) { + g.select(".extent").attr("x", xExtent[0]); + g.selectAll(".extent,.n>rect,.s>rect").attr("width", xExtent[1] - xExtent[0]); + } + function redrawY(g) { + g.select(".extent").attr("y", yExtent[0]); + g.selectAll(".extent,.e>rect,.w>rect").attr("height", yExtent[1] - yExtent[0]); + } + function brushstart() { + var target = this, eventTarget = d3.select(d3.event.target), event_ = event.of(target, arguments), g = d3.select(target), resizing = eventTarget.datum(), resizingX = 
!/^(n|s)$/.test(resizing) && x, resizingY = !/^(e|w)$/.test(resizing) && y, dragging = eventTarget.classed("extent"), dragRestore = d3_event_dragSuppress(target), center, origin = d3.mouse(target), offset; + var w = d3.select(d3_window(target)).on("keydown.brush", keydown).on("keyup.brush", keyup); + if (d3.event.changedTouches) { + w.on("touchmove.brush", brushmove).on("touchend.brush", brushend); + } else { + w.on("mousemove.brush", brushmove).on("mouseup.brush", brushend); + } + g.interrupt().selectAll("*").interrupt(); + if (dragging) { + origin[0] = xExtent[0] - origin[0]; + origin[1] = yExtent[0] - origin[1]; + } else if (resizing) { + var ex = +/w$/.test(resizing), ey = +/^n/.test(resizing); + offset = [ xExtent[1 - ex] - origin[0], yExtent[1 - ey] - origin[1] ]; + origin[0] = xExtent[ex]; + origin[1] = yExtent[ey]; + } else if (d3.event.altKey) center = origin.slice(); + g.style("pointer-events", "none").selectAll(".resize").style("display", null); + d3.select("body").style("cursor", eventTarget.style("cursor")); + event_({ + type: "brushstart" + }); + brushmove(); + function keydown() { + if (d3.event.keyCode == 32) { + if (!dragging) { + center = null; + origin[0] -= xExtent[1]; + origin[1] -= yExtent[1]; + dragging = 2; + } + d3_eventPreventDefault(); + } + } + function keyup() { + if (d3.event.keyCode == 32 && dragging == 2) { + origin[0] += xExtent[1]; + origin[1] += yExtent[1]; + dragging = 0; + d3_eventPreventDefault(); + } + } + function brushmove() { + var point = d3.mouse(target), moved = false; + if (offset) { + point[0] += offset[0]; + point[1] += offset[1]; + } + if (!dragging) { + if (d3.event.altKey) { + if (!center) center = [ (xExtent[0] + xExtent[1]) / 2, (yExtent[0] + yExtent[1]) / 2 ]; + origin[0] = xExtent[+(point[0] < center[0])]; + origin[1] = yExtent[+(point[1] < center[1])]; + } else center = null; + } + if (resizingX && move1(point, x, 0)) { + redrawX(g); + moved = true; + } + if (resizingY && move1(point, y, 1)) { + redrawY(g); + moved = true; + } + if (moved) { + redraw(g); + event_({ + type: "brush", + mode: dragging ? "move" : "resize" + }); + } + } + function move1(point, scale, i) { + var range = d3_scaleRange(scale), r0 = range[0], r1 = range[1], position = origin[i], extent = i ? yExtent : xExtent, size = extent[1] - extent[0], min, max; + if (dragging) { + r0 -= position; + r1 -= size + position; + } + min = (i ? yClamp : xClamp) ? Math.max(r0, Math.min(r1, point[i])) : point[i]; + if (dragging) { + max = (min += position) + size; + } else { + if (center) position = Math.max(r0, Math.min(r1, 2 * center[i] - min)); + if (position < min) { + max = min; + min = position; + } else { + max = position; + } + } + if (extent[0] != min || extent[1] != max) { + if (i) yExtentDomain = null; else xExtentDomain = null; + extent[0] = min; + extent[1] = max; + return true; + } + } + function brushend() { + brushmove(); + g.style("pointer-events", "all").selectAll(".resize").style("display", brush.empty() ? 
"none" : null); + d3.select("body").style("cursor", null); + w.on("mousemove.brush", null).on("mouseup.brush", null).on("touchmove.brush", null).on("touchend.brush", null).on("keydown.brush", null).on("keyup.brush", null); + dragRestore(); + event_({ + type: "brushend" + }); + } + } + brush.x = function(z) { + if (!arguments.length) return x; + x = z; + resizes = d3_svg_brushResizes[!x << 1 | !y]; + return brush; + }; + brush.y = function(z) { + if (!arguments.length) return y; + y = z; + resizes = d3_svg_brushResizes[!x << 1 | !y]; + return brush; + }; + brush.clamp = function(z) { + if (!arguments.length) return x && y ? [ xClamp, yClamp ] : x ? xClamp : y ? yClamp : null; + if (x && y) xClamp = !!z[0], yClamp = !!z[1]; else if (x) xClamp = !!z; else if (y) yClamp = !!z; + return brush; + }; + brush.extent = function(z) { + var x0, x1, y0, y1, t; + if (!arguments.length) { + if (x) { + if (xExtentDomain) { + x0 = xExtentDomain[0], x1 = xExtentDomain[1]; + } else { + x0 = xExtent[0], x1 = xExtent[1]; + if (x.invert) x0 = x.invert(x0), x1 = x.invert(x1); + if (x1 < x0) t = x0, x0 = x1, x1 = t; + } + } + if (y) { + if (yExtentDomain) { + y0 = yExtentDomain[0], y1 = yExtentDomain[1]; + } else { + y0 = yExtent[0], y1 = yExtent[1]; + if (y.invert) y0 = y.invert(y0), y1 = y.invert(y1); + if (y1 < y0) t = y0, y0 = y1, y1 = t; + } + } + return x && y ? [ [ x0, y0 ], [ x1, y1 ] ] : x ? [ x0, x1 ] : y && [ y0, y1 ]; + } + if (x) { + x0 = z[0], x1 = z[1]; + if (y) x0 = x0[0], x1 = x1[0]; + xExtentDomain = [ x0, x1 ]; + if (x.invert) x0 = x(x0), x1 = x(x1); + if (x1 < x0) t = x0, x0 = x1, x1 = t; + if (x0 != xExtent[0] || x1 != xExtent[1]) xExtent = [ x0, x1 ]; + } + if (y) { + y0 = z[0], y1 = z[1]; + if (x) y0 = y0[1], y1 = y1[1]; + yExtentDomain = [ y0, y1 ]; + if (y.invert) y0 = y(y0), y1 = y(y1); + if (y1 < y0) t = y0, y0 = y1, y1 = t; + if (y0 != yExtent[0] || y1 != yExtent[1]) yExtent = [ y0, y1 ]; + } + return brush; + }; + brush.clear = function() { + if (!brush.empty()) { + xExtent = [ 0, 0 ], yExtent = [ 0, 0 ]; + xExtentDomain = yExtentDomain = null; + } + return brush; + }; + brush.empty = function() { + return !!x && xExtent[0] == xExtent[1] || !!y && yExtent[0] == yExtent[1]; + }; + return d3.rebind(brush, event, "on"); + }; + var d3_svg_brushCursor = { + n: "ns-resize", + e: "ew-resize", + s: "ns-resize", + w: "ew-resize", + nw: "nwse-resize", + ne: "nesw-resize", + se: "nwse-resize", + sw: "nesw-resize" + }; + var d3_svg_brushResizes = [ [ "n", "e", "s", "w", "nw", "ne", "se", "sw" ], [ "e", "w" ], [ "n", "s" ], [] ]; + var d3_time_format = d3_time.format = d3_locale_enUS.timeFormat; + var d3_time_formatUtc = d3_time_format.utc; + var d3_time_formatIso = d3_time_formatUtc("%Y-%m-%dT%H:%M:%S.%LZ"); + d3_time_format.iso = Date.prototype.toISOString && +new Date("2000-01-01T00:00:00.000Z") ? d3_time_formatIsoNative : d3_time_formatIso; + function d3_time_formatIsoNative(date) { + return date.toISOString(); + } + d3_time_formatIsoNative.parse = function(string) { + var date = new Date(string); + return isNaN(date) ? 
null : date; + }; + d3_time_formatIsoNative.toString = d3_time_formatIso.toString; + d3_time.second = d3_time_interval(function(date) { + return new d3_date(Math.floor(date / 1e3) * 1e3); + }, function(date, offset) { + date.setTime(date.getTime() + Math.floor(offset) * 1e3); + }, function(date) { + return date.getSeconds(); + }); + d3_time.seconds = d3_time.second.range; + d3_time.seconds.utc = d3_time.second.utc.range; + d3_time.minute = d3_time_interval(function(date) { + return new d3_date(Math.floor(date / 6e4) * 6e4); + }, function(date, offset) { + date.setTime(date.getTime() + Math.floor(offset) * 6e4); + }, function(date) { + return date.getMinutes(); + }); + d3_time.minutes = d3_time.minute.range; + d3_time.minutes.utc = d3_time.minute.utc.range; + d3_time.hour = d3_time_interval(function(date) { + var timezone = date.getTimezoneOffset() / 60; + return new d3_date((Math.floor(date / 36e5 - timezone) + timezone) * 36e5); + }, function(date, offset) { + date.setTime(date.getTime() + Math.floor(offset) * 36e5); + }, function(date) { + return date.getHours(); + }); + d3_time.hours = d3_time.hour.range; + d3_time.hours.utc = d3_time.hour.utc.range; + d3_time.month = d3_time_interval(function(date) { + date = d3_time.day(date); + date.setDate(1); + return date; + }, function(date, offset) { + date.setMonth(date.getMonth() + offset); + }, function(date) { + return date.getMonth(); + }); + d3_time.months = d3_time.month.range; + d3_time.months.utc = d3_time.month.utc.range; + function d3_time_scale(linear, methods, format) { + function scale(x) { + return linear(x); + } + scale.invert = function(x) { + return d3_time_scaleDate(linear.invert(x)); + }; + scale.domain = function(x) { + if (!arguments.length) return linear.domain().map(d3_time_scaleDate); + linear.domain(x); + return scale; + }; + function tickMethod(extent, count) { + var span = extent[1] - extent[0], target = span / count, i = d3.bisect(d3_time_scaleSteps, target); + return i == d3_time_scaleSteps.length ? [ methods.year, d3_scale_linearTickRange(extent.map(function(d) { + return d / 31536e6; + }), count)[2] ] : !i ? [ d3_time_scaleMilliseconds, d3_scale_linearTickRange(extent, count)[2] ] : methods[target / d3_time_scaleSteps[i - 1] < d3_time_scaleSteps[i] / target ? i - 1 : i]; + } + scale.nice = function(interval, skip) { + var domain = scale.domain(), extent = d3_scaleExtent(domain), method = interval == null ? tickMethod(extent, 10) : typeof interval === "number" && tickMethod(extent, interval); + if (method) interval = method[0], skip = method[1]; + function skipped(date) { + return !isNaN(date) && !interval.range(date, d3_time_scaleDate(+date + 1), skip).length; + } + return scale.domain(d3_scale_nice(domain, skip > 1 ? { + floor: function(date) { + while (skipped(date = interval.floor(date))) date = d3_time_scaleDate(date - 1); + return date; + }, + ceil: function(date) { + while (skipped(date = interval.ceil(date))) date = d3_time_scaleDate(+date + 1); + return date; + } + } : interval)); + }; + scale.ticks = function(interval, skip) { + var extent = d3_scaleExtent(scale.domain()), method = interval == null ? tickMethod(extent, 10) : typeof interval === "number" ? tickMethod(extent, interval) : !interval.range && [ { + range: interval + }, skip ]; + if (method) interval = method[0], skip = method[1]; + return interval.range(extent[0], d3_time_scaleDate(+extent[1] + 1), skip < 1 ? 
1 : skip); + }; + scale.tickFormat = function() { + return format; + }; + scale.copy = function() { + return d3_time_scale(linear.copy(), methods, format); + }; + return d3_scale_linearRebind(scale, linear); + } + function d3_time_scaleDate(t) { + return new Date(t); + } + var d3_time_scaleSteps = [ 1e3, 5e3, 15e3, 3e4, 6e4, 3e5, 9e5, 18e5, 36e5, 108e5, 216e5, 432e5, 864e5, 1728e5, 6048e5, 2592e6, 7776e6, 31536e6 ]; + var d3_time_scaleLocalMethods = [ [ d3_time.second, 1 ], [ d3_time.second, 5 ], [ d3_time.second, 15 ], [ d3_time.second, 30 ], [ d3_time.minute, 1 ], [ d3_time.minute, 5 ], [ d3_time.minute, 15 ], [ d3_time.minute, 30 ], [ d3_time.hour, 1 ], [ d3_time.hour, 3 ], [ d3_time.hour, 6 ], [ d3_time.hour, 12 ], [ d3_time.day, 1 ], [ d3_time.day, 2 ], [ d3_time.week, 1 ], [ d3_time.month, 1 ], [ d3_time.month, 3 ], [ d3_time.year, 1 ] ]; + var d3_time_scaleLocalFormat = d3_time_format.multi([ [ ".%L", function(d) { + return d.getMilliseconds(); + } ], [ ":%S", function(d) { + return d.getSeconds(); + } ], [ "%I:%M", function(d) { + return d.getMinutes(); + } ], [ "%I %p", function(d) { + return d.getHours(); + } ], [ "%a %d", function(d) { + return d.getDay() && d.getDate() != 1; + } ], [ "%b %d", function(d) { + return d.getDate() != 1; + } ], [ "%B", function(d) { + return d.getMonth(); + } ], [ "%Y", d3_true ] ]); + var d3_time_scaleMilliseconds = { + range: function(start, stop, step) { + return d3.range(Math.ceil(start / step) * step, +stop, step).map(d3_time_scaleDate); + }, + floor: d3_identity, + ceil: d3_identity + }; + d3_time_scaleLocalMethods.year = d3_time.year; + d3_time.scale = function() { + return d3_time_scale(d3.scale.linear(), d3_time_scaleLocalMethods, d3_time_scaleLocalFormat); + }; + var d3_time_scaleUtcMethods = d3_time_scaleLocalMethods.map(function(m) { + return [ m[0].utc, m[1] ]; + }); + var d3_time_scaleUtcFormat = d3_time_formatUtc.multi([ [ ".%L", function(d) { + return d.getUTCMilliseconds(); + } ], [ ":%S", function(d) { + return d.getUTCSeconds(); + } ], [ "%I:%M", function(d) { + return d.getUTCMinutes(); + } ], [ "%I %p", function(d) { + return d.getUTCHours(); + } ], [ "%a %d", function(d) { + return d.getUTCDay() && d.getUTCDate() != 1; + } ], [ "%b %d", function(d) { + return d.getUTCDate() != 1; + } ], [ "%B", function(d) { + return d.getUTCMonth(); + } ], [ "%Y", d3_true ] ]); + d3_time_scaleUtcMethods.year = d3_time.year.utc; + d3_time.scale.utc = function() { + return d3_time_scale(d3.scale.linear(), d3_time_scaleUtcMethods, d3_time_scaleUtcFormat); + }; + d3.text = d3_xhrType(function(request) { + return request.responseText; + }); + d3.json = function(url, callback) { + return d3_xhr(url, "application/json", d3_json, callback); + }; + function d3_json(request) { + return JSON.parse(request.responseText); + } + d3.html = function(url, callback) { + return d3_xhr(url, "text/html", d3_html, callback); + }; + function d3_html(request) { + var range = d3_document.createRange(); + range.selectNode(d3_document.body); + return range.createContextualFragment(request.responseText); + } + d3.xml = d3_xhrType(function(request) { + return request.responseXML; + }); + if (typeof define === "function" && define.amd) define(d3); else if (typeof module === "object" && module.exports) module.exports = d3; + this.d3 = d3; +}(); \ No newline at end of file diff --git a/public/assets/d3/d3.min.js b/public/assets/d3/d3.min.js new file mode 100644 index 000000000..1984d1723 --- /dev/null +++ b/public/assets/d3/d3.min.js @@ -0,0 +1,5 @@ +!function(){function 
n(n){return n&&(n.ownerDocument||n.document||n).documentElement}function t(n){return n&&(n.ownerDocument&&n.ownerDocument.defaultView||n.document&&n||n.defaultView)}function e(n,t){return t>n?-1:n>t?1:n>=t?0:0/0}function r(n){return null===n?0/0:+n}function u(n){return!isNaN(n)}function i(n){return{left:function(t,e,r,u){for(arguments.length<3&&(r=0),arguments.length<4&&(u=t.length);u>r;){var i=r+u>>>1;n(t[i],e)<0?r=i+1:u=i}return r},right:function(t,e,r,u){for(arguments.length<3&&(r=0),arguments.length<4&&(u=t.length);u>r;){var i=r+u>>>1;n(t[i],e)>0?u=i:r=i+1}return r}}}function o(n){return n.length}function a(n){for(var t=1;n*t%1;)t*=10;return t}function c(n,t){for(var e in t)Object.defineProperty(n.prototype,e,{value:t[e],enumerable:!1})}function l(){this._=Object.create(null)}function s(n){return(n+="")===pa||n[0]===va?va+n:n}function f(n){return(n+="")[0]===va?n.slice(1):n}function h(n){return s(n)in this._}function g(n){return(n=s(n))in this._&&delete this._[n]}function p(){var n=[];for(var t in this._)n.push(f(t));return n}function v(){var n=0;for(var t in this._)++n;return n}function d(){for(var n in this._)return!1;return!0}function m(){this._=Object.create(null)}function y(n){return n}function M(n,t,e){return function(){var r=e.apply(t,arguments);return r===t?n:r}}function x(n,t){if(t in n)return t;t=t.charAt(0).toUpperCase()+t.slice(1);for(var e=0,r=da.length;r>e;++e){var u=da[e]+t;if(u in n)return u}}function b(){}function _(){}function w(n){function t(){for(var t,r=e,u=-1,i=r.length;++ue;e++)for(var u,i=n[e],o=0,a=i.length;a>o;o++)(u=i[o])&&t(u,o,e);return n}function Z(n){return ya(n,Sa),n}function V(n){var t,e;return function(r,u,i){var o,a=n[i].update,c=a.length;for(i!=e&&(e=i,t=0),u>=t&&(t=u+1);!(o=a[t])&&++t0&&(n=n.slice(0,a));var l=ka.get(n);return l&&(n=l,c=B),a?t?u:r:t?b:i}function $(n,t){return function(e){var r=ta.event;ta.event=e,t[0]=this.__data__;try{n.apply(this,t)}finally{ta.event=r}}}function B(n,t){var e=$(n,t);return function(n){var t=this,r=n.relatedTarget;r&&(r===t||8&r.compareDocumentPosition(t))||e.call(t,n)}}function W(e){var r=".dragsuppress-"+ ++Aa,u="click"+r,i=ta.select(t(e)).on("touchmove"+r,S).on("dragstart"+r,S).on("selectstart"+r,S);if(null==Ea&&(Ea="onselectstart"in e?!1:x(e.style,"userSelect")),Ea){var o=n(e).style,a=o[Ea];o[Ea]="none"}return function(n){if(i.on(r,null),Ea&&(o[Ea]=a),n){var t=function(){i.on(u,null)};i.on(u,function(){S(),t()},!0),setTimeout(t,0)}}}function J(n,e){e.changedTouches&&(e=e.changedTouches[0]);var r=n.ownerSVGElement||n;if(r.createSVGPoint){var u=r.createSVGPoint();if(0>Na){var i=t(n);if(i.scrollX||i.scrollY){r=ta.select("body").append("svg").style({position:"absolute",top:0,left:0,margin:0,padding:0,border:"none"},"important");var o=r[0][0].getScreenCTM();Na=!(o.f||o.e),r.remove()}}return Na?(u.x=e.pageX,u.y=e.pageY):(u.x=e.clientX,u.y=e.clientY),u=u.matrixTransform(n.getScreenCTM().inverse()),[u.x,u.y]}var a=n.getBoundingClientRect();return[e.clientX-a.left-n.clientLeft,e.clientY-a.top-n.clientTop]}function G(){return ta.event.changedTouches[0].identifier}function K(n){return n>0?1:0>n?-1:0}function Q(n,t,e){return(t[0]-n[0])*(e[1]-n[1])-(t[1]-n[1])*(e[0]-n[0])}function nt(n){return n>1?0:-1>n?qa:Math.acos(n)}function tt(n){return n>1?Ra:-1>n?-Ra:Math.asin(n)}function et(n){return((n=Math.exp(n))-1/n)/2}function rt(n){return((n=Math.exp(n))+1/n)/2}function ut(n){return((n=Math.exp(2*n))-1)/(n+1)}function it(n){return(n=Math.sin(n/2))*n}function ot(){}function at(n,t,e){return this instanceof 
at?(this.h=+n,this.s=+t,void(this.l=+e)):arguments.length<2?n instanceof at?new at(n.h,n.s,n.l):bt(""+n,_t,at):new at(n,t,e)}function ct(n,t,e){function r(n){return n>360?n-=360:0>n&&(n+=360),60>n?i+(o-i)*n/60:180>n?o:240>n?i+(o-i)*(240-n)/60:i}function u(n){return Math.round(255*r(n))}var i,o;return n=isNaN(n)?0:(n%=360)<0?n+360:n,t=isNaN(t)?0:0>t?0:t>1?1:t,e=0>e?0:e>1?1:e,o=.5>=e?e*(1+t):e+t-e*t,i=2*e-o,new mt(u(n+120),u(n),u(n-120))}function lt(n,t,e){return this instanceof lt?(this.h=+n,this.c=+t,void(this.l=+e)):arguments.length<2?n instanceof lt?new lt(n.h,n.c,n.l):n instanceof ft?gt(n.l,n.a,n.b):gt((n=wt((n=ta.rgb(n)).r,n.g,n.b)).l,n.a,n.b):new lt(n,t,e)}function st(n,t,e){return isNaN(n)&&(n=0),isNaN(t)&&(t=0),new ft(e,Math.cos(n*=Da)*t,Math.sin(n)*t)}function ft(n,t,e){return this instanceof ft?(this.l=+n,this.a=+t,void(this.b=+e)):arguments.length<2?n instanceof ft?new ft(n.l,n.a,n.b):n instanceof lt?st(n.h,n.c,n.l):wt((n=mt(n)).r,n.g,n.b):new ft(n,t,e)}function ht(n,t,e){var r=(n+16)/116,u=r+t/500,i=r-e/200;return u=pt(u)*Xa,r=pt(r)*$a,i=pt(i)*Ba,new mt(dt(3.2404542*u-1.5371385*r-.4985314*i),dt(-.969266*u+1.8760108*r+.041556*i),dt(.0556434*u-.2040259*r+1.0572252*i))}function gt(n,t,e){return n>0?new lt(Math.atan2(e,t)*Pa,Math.sqrt(t*t+e*e),n):new lt(0/0,0/0,n)}function pt(n){return n>.206893034?n*n*n:(n-4/29)/7.787037}function vt(n){return n>.008856?Math.pow(n,1/3):7.787037*n+4/29}function dt(n){return Math.round(255*(.00304>=n?12.92*n:1.055*Math.pow(n,1/2.4)-.055))}function mt(n,t,e){return this instanceof mt?(this.r=~~n,this.g=~~t,void(this.b=~~e)):arguments.length<2?n instanceof mt?new mt(n.r,n.g,n.b):bt(""+n,mt,ct):new mt(n,t,e)}function yt(n){return new mt(n>>16,n>>8&255,255&n)}function Mt(n){return yt(n)+""}function xt(n){return 16>n?"0"+Math.max(0,n).toString(16):Math.min(255,n).toString(16)}function bt(n,t,e){var r,u,i,o=0,a=0,c=0;if(r=/([a-z]+)\((.*)\)/.exec(n=n.toLowerCase()))switch(u=r[2].split(","),r[1]){case"hsl":return e(parseFloat(u[0]),parseFloat(u[1])/100,parseFloat(u[2])/100);case"rgb":return t(kt(u[0]),kt(u[1]),kt(u[2]))}return(i=Ga.get(n))?t(i.r,i.g,i.b):(null==n||"#"!==n.charAt(0)||isNaN(i=parseInt(n.slice(1),16))||(4===n.length?(o=(3840&i)>>4,o=o>>4|o,a=240&i,a=a>>4|a,c=15&i,c=c<<4|c):7===n.length&&(o=(16711680&i)>>16,a=(65280&i)>>8,c=255&i)),t(o,a,c))}function _t(n,t,e){var r,u,i=Math.min(n/=255,t/=255,e/=255),o=Math.max(n,t,e),a=o-i,c=(o+i)/2;return a?(u=.5>c?a/(o+i):a/(2-o-i),r=n==o?(t-e)/a+(e>t?6:0):t==o?(e-n)/a+2:(n-t)/a+4,r*=60):(r=0/0,u=c>0&&1>c?0:r),new at(r,u,c)}function wt(n,t,e){n=St(n),t=St(t),e=St(e);var r=vt((.4124564*n+.3575761*t+.1804375*e)/Xa),u=vt((.2126729*n+.7151522*t+.072175*e)/$a),i=vt((.0193339*n+.119192*t+.9503041*e)/Ba);return ft(116*u-16,500*(r-u),200*(u-i))}function St(n){return(n/=255)<=.04045?n/12.92:Math.pow((n+.055)/1.055,2.4)}function kt(n){var t=parseFloat(n);return"%"===n.charAt(n.length-1)?Math.round(2.55*t):t}function Et(n){return"function"==typeof n?n:function(){return n}}function At(n){return function(t,e,r){return 2===arguments.length&&"function"==typeof e&&(r=e,e=null),Nt(t,e,n,r)}}function Nt(n,t,e,r){function u(){var n,t=c.status;if(!t&&zt(c)||t>=200&&300>t||304===t){try{n=e.call(i,c)}catch(r){return void o.error.call(i,r)}o.load.call(i,n)}else o.error.call(i,c)}var i={},o=ta.dispatch("beforesend","progress","load","error"),a={},c=new XMLHttpRequest,l=null;return!this.XDomainRequest||"withCredentials"in c||!/^(http(s)?:)?\/\//.test(n)||(c=new XDomainRequest),"onload"in 
c?c.onload=c.onerror=u:c.onreadystatechange=function(){c.readyState>3&&u()},c.onprogress=function(n){var t=ta.event;ta.event=n;try{o.progress.call(i,c)}finally{ta.event=t}},i.header=function(n,t){return n=(n+"").toLowerCase(),arguments.length<2?a[n]:(null==t?delete a[n]:a[n]=t+"",i)},i.mimeType=function(n){return arguments.length?(t=null==n?null:n+"",i):t},i.responseType=function(n){return arguments.length?(l=n,i):l},i.response=function(n){return e=n,i},["get","post"].forEach(function(n){i[n]=function(){return i.send.apply(i,[n].concat(ra(arguments)))}}),i.send=function(e,r,u){if(2===arguments.length&&"function"==typeof r&&(u=r,r=null),c.open(e,n,!0),null==t||"accept"in a||(a.accept=t+",*/*"),c.setRequestHeader)for(var s in a)c.setRequestHeader(s,a[s]);return null!=t&&c.overrideMimeType&&c.overrideMimeType(t),null!=l&&(c.responseType=l),null!=u&&i.on("error",u).on("load",function(n){u(null,n)}),o.beforesend.call(i,c),c.send(null==r?null:r),i},i.abort=function(){return c.abort(),i},ta.rebind(i,o,"on"),null==r?i:i.get(Ct(r))}function Ct(n){return 1===n.length?function(t,e){n(null==t?e:null)}:n}function zt(n){var t=n.responseType;return t&&"text"!==t?n.response:n.responseText}function qt(){var n=Lt(),t=Tt()-n;t>24?(isFinite(t)&&(clearTimeout(tc),tc=setTimeout(qt,t)),nc=0):(nc=1,rc(qt))}function Lt(){var n=Date.now();for(ec=Ka;ec;)n>=ec.t&&(ec.f=ec.c(n-ec.t)),ec=ec.n;return n}function Tt(){for(var n,t=Ka,e=1/0;t;)t.f?t=n?n.n=t.n:Ka=t.n:(t.t8?function(n){return n/e}:function(n){return n*e},symbol:n}}function Pt(n){var t=n.decimal,e=n.thousands,r=n.grouping,u=n.currency,i=r&&e?function(n,t){for(var u=n.length,i=[],o=0,a=r[0],c=0;u>0&&a>0&&(c+a+1>t&&(a=Math.max(1,t-c)),i.push(n.substring(u-=a,u+a)),!((c+=a+1)>t));)a=r[o=(o+1)%r.length];return i.reverse().join(e)}:y;return function(n){var e=ic.exec(n),r=e[1]||" ",o=e[2]||">",a=e[3]||"-",c=e[4]||"",l=e[5],s=+e[6],f=e[7],h=e[8],g=e[9],p=1,v="",d="",m=!1,y=!0;switch(h&&(h=+h.substring(1)),(l||"0"===r&&"="===o)&&(l=r="0",o="="),g){case"n":f=!0,g="g";break;case"%":p=100,d="%",g="f";break;case"p":p=100,d="%",g="r";break;case"b":case"o":case"x":case"X":"#"===c&&(v="0"+g.toLowerCase());case"c":y=!1;case"d":m=!0,h=0;break;case"s":p=-1,g="r"}"$"===c&&(v=u[0],d=u[1]),"r"!=g||h||(g="g"),null!=h&&("g"==g?h=Math.max(1,Math.min(21,h)):("e"==g||"f"==g)&&(h=Math.max(0,Math.min(20,h)))),g=oc.get(g)||Ut;var M=l&&f;return function(n){var e=d;if(m&&n%1)return"";var u=0>n||0===n&&0>1/n?(n=-n,"-"):"-"===a?"":a;if(0>p){var c=ta.formatPrefix(n,h);n=c.scale(n),e=c.symbol+d}else n*=p;n=g(n,h);var x,b,_=n.lastIndexOf(".");if(0>_){var w=y?n.lastIndexOf("e"):-1;0>w?(x=n,b=""):(x=n.substring(0,w),b=n.substring(w))}else x=n.substring(0,_),b=t+n.substring(_+1);!l&&f&&(x=i(x,1/0));var S=v.length+x.length+b.length+(M?0:u.length),k=s>S?new Array(S=s-S+1).join(r):"";return M&&(x=i(k+x,k.length?s-b.length:1/0)),u+=v,n=x+b,("<"===o?u+n+k:">"===o?k+u+n:"^"===o?k.substring(0,S>>=1)+u+n+k.substring(S):u+(M?n:k+n))+e}}}function Ut(n){return n+""}function jt(){this._=new Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function Ft(n,t,e){function r(t){var e=n(t),r=i(e,1);return r-t>t-e?e:r}function u(e){return t(e=n(new cc(e-1)),1),e}function i(n,e){return t(n=new cc(+n),e),n}function o(n,r,i){var o=u(n),a=[];if(i>1)for(;r>o;)e(o)%i||a.push(new Date(+o)),t(o,1);else for(;r>o;)a.push(new Date(+o)),t(o,1);return a}function a(n,t,e){try{cc=jt;var r=new jt;return r._=n,o(r,t,e)}finally{cc=Date}}n.floor=n,n.round=r,n.ceil=u,n.offset=i,n.range=o;var c=n.utc=Ht(n);return 
[minified d3.js v3.5.6 — vendored third-party library source omitted]
arguments.length?(u=t,n):u},n.padAngle=function(t){return arguments.length?(i=t,n):i},n};var pl={};ta.layout.stack=function(){function n(a,c){if(!(h=a.length))return a;var l=a.map(function(e,r){return t.call(n,e,r)}),s=l.map(function(t){return t.map(function(t,e){return[i.call(n,t,e),o.call(n,t,e)]})}),f=e.call(n,s,c);l=ta.permute(l,f),s=ta.permute(s,f);var h,g,p,v,d=r.call(n,s,c),m=l[0].length;for(p=0;m>p;++p)for(u.call(n,l[0][p],v=d[p],s[0][p][1]),g=1;h>g;++g)u.call(n,l[g][p],v+=s[g-1][p][1],s[g][p][1]);return a}var t=y,e=ai,r=ci,u=oi,i=ui,o=ii;return n.values=function(e){return arguments.length?(t=e,n):t},n.order=function(t){return arguments.length?(e="function"==typeof t?t:vl.get(t)||ai,n):e},n.offset=function(t){return arguments.length?(r="function"==typeof t?t:dl.get(t)||ci,n):r},n.x=function(t){return arguments.length?(i=t,n):i},n.y=function(t){return arguments.length?(o=t,n):o},n.out=function(t){return arguments.length?(u=t,n):u},n};var vl=ta.map({"inside-out":function(n){var t,e,r=n.length,u=n.map(li),i=n.map(si),o=ta.range(r).sort(function(n,t){return u[n]-u[t]}),a=0,c=0,l=[],s=[];for(t=0;r>t;++t)e=o[t],c>a?(a+=i[e],l.push(e)):(c+=i[e],s.push(e));return s.reverse().concat(l)},reverse:function(n){return ta.range(n.length).reverse()},"default":ai}),dl=ta.map({silhouette:function(n){var t,e,r,u=n.length,i=n[0].length,o=[],a=0,c=[];for(e=0;i>e;++e){for(t=0,r=0;u>t;t++)r+=n[t][e][1];r>a&&(a=r),o.push(r)}for(e=0;i>e;++e)c[e]=(a-o[e])/2;return c},wiggle:function(n){var t,e,r,u,i,o,a,c,l,s=n.length,f=n[0],h=f.length,g=[];for(g[0]=c=l=0,e=1;h>e;++e){for(t=0,u=0;s>t;++t)u+=n[t][e][1];for(t=0,i=0,a=f[e][0]-f[e-1][0];s>t;++t){for(r=0,o=(n[t][e][1]-n[t][e-1][1])/(2*a);t>r;++r)o+=(n[r][e][1]-n[r][e-1][1])/a;i+=o*n[t][e][1]}g[e]=c-=u?i/u*a:0,l>c&&(l=c)}for(e=0;h>e;++e)g[e]-=l;return g},expand:function(n){var t,e,r,u=n.length,i=n[0].length,o=1/u,a=[];for(e=0;i>e;++e){for(t=0,r=0;u>t;t++)r+=n[t][e][1];if(r)for(t=0;u>t;t++)n[t][e][1]/=r;else for(t=0;u>t;t++)n[t][e][1]=o}for(e=0;i>e;++e)a[e]=0;return a},zero:ci});ta.layout.histogram=function(){function n(n,i){for(var o,a,c=[],l=n.map(e,this),s=r.call(this,l,i),f=u.call(this,s,l,i),i=-1,h=l.length,g=f.length-1,p=t?1:1/h;++i0)for(i=-1;++i=s[0]&&a<=s[1]&&(o=c[ta.bisect(f,a,1,g)-1],o.y+=p,o.push(n[i]));return c}var t=!0,e=Number,r=pi,u=hi;return n.value=function(t){return arguments.length?(e=t,n):e},n.range=function(t){return arguments.length?(r=Et(t),n):r},n.bins=function(t){return arguments.length?(u="number"==typeof t?function(n){return gi(n,t)}:Et(t),n):u},n.frequency=function(e){return arguments.length?(t=!!e,n):t},n},ta.layout.pack=function(){function n(n,i){var o=e.call(this,n,i),a=o[0],c=u[0],l=u[1],s=null==t?Math.sqrt:"function"==typeof t?t:function(){return t};if(a.x=a.y=0,Qu(a,function(n){n.r=+s(n.value)}),Qu(a,Mi),r){var f=r*(t?1:Math.max(2*a.r/c,2*a.r/l))/2;Qu(a,function(n){n.r+=f}),Qu(a,Mi),Qu(a,function(n){n.r-=f})}return _i(a,c/2,l/2,t?1:1/Math.max(2*a.r/c,2*a.r/l)),o}var t,e=ta.layout.hierarchy().sort(vi),r=0,u=[1,1];return n.size=function(t){return arguments.length?(u=t,n):u},n.radius=function(e){return arguments.length?(t=null==e||"function"==typeof e?e:+e,n):t},n.padding=function(t){return arguments.length?(r=+t,n):r},Gu(n,e)},ta.layout.tree=function(){function n(n,u){var s=o.call(this,n,u),f=s[0],h=t(f);if(Qu(h,e),h.parent.m=-h.z,Ku(h,r),l)Ku(f,i);else{var g=f,p=f,v=f;Ku(f,function(n){n.xp.x&&(p=n),n.depth>v.depth&&(v=n)});var 
d=a(g,p)/2-g.x,m=c[0]/(p.x+a(p,g)/2+d),y=c[1]/(v.depth||1);Ku(f,function(n){n.x=(n.x+d)*m,n.y=n.depth*y})}return s}function t(n){for(var t,e={A:null,children:[n]},r=[e];null!=(t=r.pop());)for(var u,i=t.children,o=0,a=i.length;a>o;++o)r.push((i[o]=u={_:i[o],parent:t,children:(u=i[o].children)&&u.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:o}).a=u);return e.children[0]}function e(n){var t=n.children,e=n.parent.children,r=n.i?e[n.i-1]:null;if(t.length){Ni(n);var i=(t[0].z+t[t.length-1].z)/2;r?(n.z=r.z+a(n._,r._),n.m=n.z-i):n.z=i}else r&&(n.z=r.z+a(n._,r._));n.parent.A=u(n,r,n.parent.A||e[0])}function r(n){n._.x=n.z+n.parent.m,n.m+=n.parent.m}function u(n,t,e){if(t){for(var r,u=n,i=n,o=t,c=u.parent.children[0],l=u.m,s=i.m,f=o.m,h=c.m;o=Ei(o),u=ki(u),o&&u;)c=ki(c),i=Ei(i),i.a=n,r=o.z+f-u.z-l+a(o._,u._),r>0&&(Ai(Ci(o,n,e),n,r),l+=r,s+=r),f+=o.m,l+=u.m,h+=c.m,s+=i.m;o&&!Ei(i)&&(i.t=o,i.m+=f-s),u&&!ki(c)&&(c.t=u,c.m+=l-h,e=n)}return e}function i(n){n.x*=c[0],n.y=n.depth*c[1]}var o=ta.layout.hierarchy().sort(null).value(null),a=Si,c=[1,1],l=null;return n.separation=function(t){return arguments.length?(a=t,n):a},n.size=function(t){return arguments.length?(l=null==(c=t)?i:null,n):l?null:c},n.nodeSize=function(t){return arguments.length?(l=null==(c=t)?null:i,n):l?c:null},Gu(n,o)},ta.layout.cluster=function(){function n(n,i){var o,a=t.call(this,n,i),c=a[0],l=0;Qu(c,function(n){var t=n.children;t&&t.length?(n.x=qi(t),n.y=zi(t)):(n.x=o?l+=e(n,o):0,n.y=0,o=n)});var s=Li(c),f=Ti(c),h=s.x-e(s,f)/2,g=f.x+e(f,s)/2;return Qu(c,u?function(n){n.x=(n.x-c.x)*r[0],n.y=(c.y-n.y)*r[1]}:function(n){n.x=(n.x-h)/(g-h)*r[0],n.y=(1-(c.y?n.y/c.y:1))*r[1]}),a}var t=ta.layout.hierarchy().sort(null).value(null),e=Si,r=[1,1],u=!1;return n.separation=function(t){return arguments.length?(e=t,n):e},n.size=function(t){return arguments.length?(u=null==(r=t),n):u?null:r},n.nodeSize=function(t){return arguments.length?(u=null!=(r=t),n):u?r:null},Gu(n,t)},ta.layout.treemap=function(){function n(n,t){for(var e,r,u=-1,i=n.length;++ut?0:t),e.area=isNaN(r)||0>=r?0:r}function t(e){var i=e.children;if(i&&i.length){var o,a,c,l=f(e),s=[],h=i.slice(),p=1/0,v="slice"===g?l.dx:"dice"===g?l.dy:"slice-dice"===g?1&e.depth?l.dy:l.dx:Math.min(l.dx,l.dy);for(n(h,l.dx*l.dy/e.value),s.area=0;(c=h.length)>0;)s.push(o=h[c-1]),s.area+=o.area,"squarify"!==g||(a=r(s,v))<=p?(h.pop(),p=a):(s.area-=s.pop().area,u(s,v,l,!1),v=Math.min(l.dx,l.dy),s.length=s.area=0,p=1/0);s.length&&(u(s,v,l,!0),s.length=s.area=0),i.forEach(t)}}function e(t){var r=t.children;if(r&&r.length){var i,o=f(t),a=r.slice(),c=[];for(n(a,o.dx*o.dy/t.value),c.area=0;i=a.pop();)c.push(i),c.area+=i.area,null!=i.z&&(u(c,i.z?o.dx:o.dy,o,!a.length),c.length=c.area=0);r.forEach(e)}}function r(n,t){for(var e,r=n.area,u=0,i=1/0,o=-1,a=n.length;++oe&&(i=e),e>u&&(u=e));return r*=r,t*=t,r?Math.max(t*u*p/r,r/(t*i*p)):1/0}function u(n,t,e,r){var u,i=-1,o=n.length,a=e.x,l=e.y,s=t?c(n.area/t):0;if(t==e.dx){for((r||s>e.dy)&&(s=e.dy);++ie.dx)&&(s=e.dx);++ie&&(t=1),1>e&&(n=0),function(){var e,r,u;do e=2*Math.random()-1,r=2*Math.random()-1,u=e*e+r*r;while(!u||u>1);return n+t*e*Math.sqrt(-2*Math.log(u)/u)}},logNormal:function(){var n=ta.random.normal.apply(ta,arguments);return function(){return Math.exp(n())}},bates:function(n){var t=ta.random.irwinHall(n);return function(){return t()/n}},irwinHall:function(n){return function(){for(var t=0,e=0;n>e;e++)t+=Math.random();return t}}},ta.scale={};var ml={floor:y,ceil:y};ta.scale.linear=function(){return Ii([0,1],[0,1],mu,!1)};var 
yl={s:1,g:1,p:1,r:1,e:1};ta.scale.log=function(){return Ji(ta.scale.linear().domain([0,1]),10,!0,[1,10])};var Ml=ta.format(".0e"),xl={floor:function(n){return-Math.ceil(-n)},ceil:function(n){return-Math.floor(-n)}};ta.scale.pow=function(){return Gi(ta.scale.linear(),1,[0,1])},ta.scale.sqrt=function(){return ta.scale.pow().exponent(.5)},ta.scale.ordinal=function(){return Qi([],{t:"range",a:[[]]})},ta.scale.category10=function(){return ta.scale.ordinal().range(bl)},ta.scale.category20=function(){return ta.scale.ordinal().range(_l)},ta.scale.category20b=function(){return ta.scale.ordinal().range(wl)},ta.scale.category20c=function(){return ta.scale.ordinal().range(Sl)};var bl=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(Mt),_l=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(Mt),wl=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(Mt),Sl=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(Mt);ta.scale.quantile=function(){return no([],[])},ta.scale.quantize=function(){return to(0,1,[0,1])},ta.scale.threshold=function(){return eo([.5],[0,1])},ta.scale.identity=function(){return ro([0,1])},ta.svg={},ta.svg.arc=function(){function n(){var n=Math.max(0,+e.apply(this,arguments)),l=Math.max(0,+r.apply(this,arguments)),s=o.apply(this,arguments)-Ra,f=a.apply(this,arguments)-Ra,h=Math.abs(f-s),g=s>f?0:1;if(n>l&&(p=l,l=n,n=p),h>=Ta)return t(l,g)+(n?t(n,1-g):"")+"Z";var p,v,d,m,y,M,x,b,_,w,S,k,E=0,A=0,N=[];if((m=(+c.apply(this,arguments)||0)/2)&&(d=i===kl?Math.sqrt(n*n+l*l):+i.apply(this,arguments),g||(A*=-1),l&&(A=tt(d/l*Math.sin(m))),n&&(E=tt(d/n*Math.sin(m)))),l){y=l*Math.cos(s+A),M=l*Math.sin(s+A),x=l*Math.cos(f-A),b=l*Math.sin(f-A);var C=Math.abs(f-s-2*A)<=qa?0:1;if(A&&so(y,M,x,b)===g^C){var z=(s+f)/2;y=l*Math.cos(z),M=l*Math.sin(z),x=b=null}}else y=M=0;if(n){_=n*Math.cos(f-E),w=n*Math.sin(f-E),S=n*Math.cos(s+E),k=n*Math.sin(s+E);var q=Math.abs(s-f+2*E)<=qa?0:1;if(E&&so(_,w,S,k)===1-g^q){var L=(s+f)/2;_=n*Math.cos(L),w=n*Math.sin(L),S=k=null}}else _=w=0;if((p=Math.min(Math.abs(l-n)/2,+u.apply(this,arguments)))>.001){v=l>n^g?0:1;var T=null==S?[_,w]:null==x?[y,M]:Lr([y,M],[S,k],[x,b],[_,w]),R=y-T[0],D=M-T[1],P=x-T[0],U=b-T[1],j=1/Math.sin(Math.acos((R*P+D*U)/(Math.sqrt(R*R+D*D)*Math.sqrt(P*P+U*U)))/2),F=Math.sqrt(T[0]*T[0]+T[1]*T[1]);if(null!=x){var H=Math.min(p,(l-F)/(j+1)),O=fo(null==S?[_,w]:[S,k],[y,M],l,H,g),I=fo([x,b],[_,w],l,H,g);p===H?N.push("M",O[0],"A",H,",",H," 0 0,",v," ",O[1],"A",l,",",l," 0 ",1-g^so(O[1][0],O[1][1],I[1][0],I[1][1]),",",g," ",I[1],"A",H,",",H," 0 0,",v," ",I[0]):N.push("M",O[0],"A",H,",",H," 0 1,",v," ",I[0])}else N.push("M",y,",",M);if(null!=S){var Y=Math.min(p,(n-F)/(j-1)),Z=fo([y,M],[S,k],n,-Y,g),V=fo([_,w],null==x?[y,M]:[x,b],n,-Y,g);p===Y?N.push("L",V[0],"A",Y,",",Y," 0 0,",v," ",V[1],"A",n,",",n," 0 ",g^so(V[1][0],V[1][1],Z[1][0],Z[1][1]),",",1-g," ",Z[1],"A",Y,",",Y," 0 0,",v," ",Z[0]):N.push("L",V[0],"A",Y,",",Y," 0 0,",v," ",Z[0])}else N.push("L",_,",",w)}else N.push("M",y,",",M),null!=x&&N.push("A",l,",",l," 0 ",C,",",g," ",x,",",b),N.push("L",_,",",w),null!=S&&N.push("A",n,",",n," 0 ",q,",",1-g," ",S,",",k);return 
N.push("Z"),N.join("")}function t(n,t){return"M0,"+n+"A"+n+","+n+" 0 1,"+t+" 0,"+-n+"A"+n+","+n+" 0 1,"+t+" 0,"+n}var e=io,r=oo,u=uo,i=kl,o=ao,a=co,c=lo;return n.innerRadius=function(t){return arguments.length?(e=Et(t),n):e},n.outerRadius=function(t){return arguments.length?(r=Et(t),n):r},n.cornerRadius=function(t){return arguments.length?(u=Et(t),n):u},n.padRadius=function(t){return arguments.length?(i=t==kl?kl:Et(t),n):i},n.startAngle=function(t){return arguments.length?(o=Et(t),n):o},n.endAngle=function(t){return arguments.length?(a=Et(t),n):a},n.padAngle=function(t){return arguments.length?(c=Et(t),n):c},n.centroid=function(){var n=(+e.apply(this,arguments)+ +r.apply(this,arguments))/2,t=(+o.apply(this,arguments)+ +a.apply(this,arguments))/2-Ra;return[Math.cos(t)*n,Math.sin(t)*n]},n};var kl="auto";ta.svg.line=function(){return ho(y)};var El=ta.map({linear:go,"linear-closed":po,step:vo,"step-before":mo,"step-after":yo,basis:So,"basis-open":ko,"basis-closed":Eo,bundle:Ao,cardinal:bo,"cardinal-open":Mo,"cardinal-closed":xo,monotone:To});El.forEach(function(n,t){t.key=n,t.closed=/-closed$/.test(n)});var Al=[0,2/3,1/3,0],Nl=[0,1/3,2/3,0],Cl=[0,1/6,2/3,1/6];ta.svg.line.radial=function(){var n=ho(Ro);return n.radius=n.x,delete n.x,n.angle=n.y,delete n.y,n},mo.reverse=yo,yo.reverse=mo,ta.svg.area=function(){return Do(y)},ta.svg.area.radial=function(){var n=Do(Ro);return n.radius=n.x,delete n.x,n.innerRadius=n.x0,delete n.x0,n.outerRadius=n.x1,delete n.x1,n.angle=n.y,delete n.y,n.startAngle=n.y0,delete n.y0,n.endAngle=n.y1,delete n.y1,n},ta.svg.chord=function(){function n(n,a){var c=t(this,i,n,a),l=t(this,o,n,a);return"M"+c.p0+r(c.r,c.p1,c.a1-c.a0)+(e(c,l)?u(c.r,c.p1,c.r,c.p0):u(c.r,c.p1,l.r,l.p0)+r(l.r,l.p1,l.a1-l.a0)+u(l.r,l.p1,c.r,c.p0))+"Z"}function t(n,t,e,r){var u=t.call(n,e,r),i=a.call(n,u,r),o=c.call(n,u,r)-Ra,s=l.call(n,u,r)-Ra;return{r:i,a0:o,a1:s,p0:[i*Math.cos(o),i*Math.sin(o)],p1:[i*Math.cos(s),i*Math.sin(s)]}}function e(n,t){return n.a0==t.a0&&n.a1==t.a1}function r(n,t,e){return"A"+n+","+n+" 0 "+ +(e>qa)+",1 "+t}function u(n,t,e,r){return"Q 0,0 "+r}var i=mr,o=yr,a=Po,c=ao,l=co;return n.radius=function(t){return arguments.length?(a=Et(t),n):a},n.source=function(t){return arguments.length?(i=Et(t),n):i},n.target=function(t){return arguments.length?(o=Et(t),n):o},n.startAngle=function(t){return arguments.length?(c=Et(t),n):c},n.endAngle=function(t){return arguments.length?(l=Et(t),n):l},n},ta.svg.diagonal=function(){function n(n,u){var i=t.call(this,n,u),o=e.call(this,n,u),a=(i.y+o.y)/2,c=[i,{x:i.x,y:a},{x:o.x,y:a},o];return c=c.map(r),"M"+c[0]+"C"+c[1]+" "+c[2]+" "+c[3]}var t=mr,e=yr,r=Uo;return n.source=function(e){return arguments.length?(t=Et(e),n):t},n.target=function(t){return arguments.length?(e=Et(t),n):e},n.projection=function(t){return arguments.length?(r=t,n):r},n},ta.svg.diagonal.radial=function(){var n=ta.svg.diagonal(),t=Uo,e=n.projection;return n.projection=function(n){return arguments.length?e(jo(t=n)):t},n},ta.svg.symbol=function(){function n(n,r){return(zl.get(t.call(this,n,r))||Oo)(e.call(this,n,r))}var t=Ho,e=Fo;return n.type=function(e){return arguments.length?(t=Et(e),n):t},n.size=function(t){return arguments.length?(e=Et(t),n):e},n};var zl=ta.map({circle:Oo,cross:function(n){var t=Math.sqrt(n/5)/2;return"M"+-3*t+","+-t+"H"+-t+"V"+-3*t+"H"+t+"V"+-t+"H"+3*t+"V"+t+"H"+t+"V"+3*t+"H"+-t+"V"+t+"H"+-3*t+"Z"},diamond:function(n){var t=Math.sqrt(n/(2*Ll)),e=t*Ll;return"M0,"+-t+"L"+e+",0 0,"+t+" "+-e+",0Z"},square:function(n){var 
t=Math.sqrt(n)/2;return"M"+-t+","+-t+"L"+t+","+-t+" "+t+","+t+" "+-t+","+t+"Z"},"triangle-down":function(n){var t=Math.sqrt(n/ql),e=t*ql/2;return"M0,"+e+"L"+t+","+-e+" "+-t+","+-e+"Z"},"triangle-up":function(n){var t=Math.sqrt(n/ql),e=t*ql/2;return"M0,"+-e+"L"+t+","+e+" "+-t+","+e+"Z"}});ta.svg.symbolTypes=zl.keys();var ql=Math.sqrt(3),Ll=Math.tan(30*Da);_a.transition=function(n){for(var t,e,r=Tl||++Ul,u=Xo(n),i=[],o=Rl||{time:Date.now(),ease:Su,delay:0,duration:250},a=-1,c=this.length;++ai;i++){u.push(t=[]);for(var e=this[i],a=0,c=e.length;c>a;a++)(r=e[a])&&n.call(r,r.__data__,a,i)&&t.push(r)}return Yo(u,this.namespace,this.id)},Pl.tween=function(n,t){var e=this.id,r=this.namespace;return arguments.length<2?this.node()[r][e].tween.get(n):Y(this,null==t?function(t){t[r][e].tween.remove(n)}:function(u){u[r][e].tween.set(n,t)})},Pl.attr=function(n,t){function e(){this.removeAttribute(a)}function r(){this.removeAttributeNS(a.space,a.local)}function u(n){return null==n?e:(n+="",function(){var t,e=this.getAttribute(a);return e!==n&&(t=o(e,n),function(n){this.setAttribute(a,t(n))})})}function i(n){return null==n?r:(n+="",function(){var t,e=this.getAttributeNS(a.space,a.local);return e!==n&&(t=o(e,n),function(n){this.setAttributeNS(a.space,a.local,t(n))})})}if(arguments.length<2){for(t in n)this.attr(t,n[t]);return this}var o="transform"==n?Hu:mu,a=ta.ns.qualify(n);return Zo(this,"attr."+n,t,a.local?i:u)},Pl.attrTween=function(n,t){function e(n,e){var r=t.call(this,n,e,this.getAttribute(u));return r&&function(n){this.setAttribute(u,r(n))}}function r(n,e){var r=t.call(this,n,e,this.getAttributeNS(u.space,u.local));return r&&function(n){this.setAttributeNS(u.space,u.local,r(n))}}var u=ta.ns.qualify(n);return this.tween("attr."+n,u.local?r:e)},Pl.style=function(n,e,r){function u(){this.style.removeProperty(n)}function i(e){return null==e?u:(e+="",function(){var u,i=t(this).getComputedStyle(this,null).getPropertyValue(n);return i!==e&&(u=mu(i,e),function(t){this.style.setProperty(n,u(t),r)})})}var o=arguments.length;if(3>o){if("string"!=typeof n){2>o&&(e="");for(r in n)this.style(r,n[r],e);return this}r=""}return Zo(this,"style."+n,e,i)},Pl.styleTween=function(n,e,r){function u(u,i){var o=e.call(this,u,i,t(this).getComputedStyle(this,null).getPropertyValue(n));return o&&function(t){this.style.setProperty(n,o(t),r)}}return arguments.length<3&&(r=""),this.tween("style."+n,u)},Pl.text=function(n){return Zo(this,"text",n,Vo)},Pl.remove=function(){var n=this.namespace;return this.each("end.transition",function(){var t;this[n].count<2&&(t=this.parentNode)&&t.removeChild(this)})},Pl.ease=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].ease:("function"!=typeof n&&(n=ta.ease.apply(ta,arguments)),Y(this,function(r){r[e][t].ease=n}))},Pl.delay=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].delay:Y(this,"function"==typeof n?function(r,u,i){r[e][t].delay=+n.call(r,r.__data__,u,i)}:(n=+n,function(r){r[e][t].delay=n}))},Pl.duration=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].duration:Y(this,"function"==typeof n?function(r,u,i){r[e][t].duration=Math.max(1,n.call(r,r.__data__,u,i))}:(n=Math.max(1,n),function(r){r[e][t].duration=n}))},Pl.each=function(n,t){var e=this.id,r=this.namespace;if(arguments.length<2){var u=Rl,i=Tl;try{Tl=e,Y(this,function(t,u,i){Rl=t[r][e],n.call(t,t.__data__,u,i)})}finally{Rl=u,Tl=i}}else Y(this,function(u){var 
i=u[r][e];(i.event||(i.event=ta.dispatch("start","end","interrupt"))).on(n,t)});return this},Pl.transition=function(){for(var n,t,e,r,u=this.id,i=++Ul,o=this.namespace,a=[],c=0,l=this.length;l>c;c++){a.push(n=[]);for(var t=this[c],s=0,f=t.length;f>s;s++)(e=t[s])&&(r=e[o][u],$o(e,s,o,i,{time:r.time,ease:r.ease,delay:r.delay+r.duration,duration:r.duration})),n.push(e)}return Yo(a,o,i)},ta.svg.axis=function(){function n(n){n.each(function(){var n,l=ta.select(this),s=this.__chart__||e,f=this.__chart__=e.copy(),h=null==c?f.ticks?f.ticks.apply(f,a):f.domain():c,g=null==t?f.tickFormat?f.tickFormat.apply(f,a):y:t,p=l.selectAll(".tick").data(h,f),v=p.enter().insert("g",".domain").attr("class","tick").style("opacity",Ca),d=ta.transition(p.exit()).style("opacity",Ca).remove(),m=ta.transition(p.order()).style("opacity",1),M=Math.max(u,0)+o,x=Ui(f),b=l.selectAll(".domain").data([0]),_=(b.enter().append("path").attr("class","domain"),ta.transition(b));v.append("line"),v.append("text");var w,S,k,E,A=v.select("line"),N=m.select("line"),C=p.select("text").text(g),z=v.select("text"),q=m.select("text"),L="top"===r||"left"===r?-1:1;if("bottom"===r||"top"===r?(n=Bo,w="x",k="y",S="x2",E="y2",C.attr("dy",0>L?"0em":".71em").style("text-anchor","middle"),_.attr("d","M"+x[0]+","+L*i+"V0H"+x[1]+"V"+L*i)):(n=Wo,w="y",k="x",S="y2",E="x2",C.attr("dy",".32em").style("text-anchor",0>L?"end":"start"),_.attr("d","M"+L*i+","+x[0]+"H0V"+x[1]+"H"+L*i)),A.attr(E,L*u),z.attr(k,L*M),N.attr(S,0).attr(E,L*u),q.attr(w,0).attr(k,L*M),f.rangeBand){var T=f,R=T.rangeBand()/2;s=f=function(n){return T(n)+R}}else s.rangeBand?s=f:d.call(n,f,s);v.call(n,s,f),m.call(n,f,f)})}var t,e=ta.scale.linear(),r=jl,u=6,i=6,o=3,a=[10],c=null;return n.scale=function(t){return arguments.length?(e=t,n):e},n.orient=function(t){return arguments.length?(r=t in Fl?t+"":jl,n):r},n.ticks=function(){return arguments.length?(a=arguments,n):a},n.tickValues=function(t){return arguments.length?(c=t,n):c},n.tickFormat=function(e){return arguments.length?(t=e,n):t},n.tickSize=function(t){var e=arguments.length;return e?(u=+t,i=+arguments[e-1],n):u},n.innerTickSize=function(t){return arguments.length?(u=+t,n):u},n.outerTickSize=function(t){return arguments.length?(i=+t,n):i},n.tickPadding=function(t){return arguments.length?(o=+t,n):o},n.tickSubdivide=function(){return arguments.length&&n},n};var jl="bottom",Fl={top:1,right:1,bottom:1,left:1};ta.svg.brush=function(){function n(t){t.each(function(){var t=ta.select(this).style("pointer-events","all").style("-webkit-tap-highlight-color","rgba(0,0,0,0)").on("mousedown.brush",i).on("touchstart.brush",i),o=t.selectAll(".background").data([0]);o.enter().append("rect").attr("class","background").style("visibility","hidden").style("cursor","crosshair"),t.selectAll(".extent").data([0]).enter().append("rect").attr("class","extent").style("cursor","move");var a=t.selectAll(".resize").data(v,y);a.exit().remove(),a.enter().append("g").attr("class",function(n){return"resize "+n}).style("cursor",function(n){return Hl[n]}).append("rect").attr("x",function(n){return/[ew]$/.test(n)?-3:null}).attr("y",function(n){return/^[ns]/.test(n)?-3:null}).attr("width",6).attr("height",6).style("visibility","hidden"),a.style("display",n.empty()?"none":null);var c,f=ta.transition(t),h=ta.transition(o);l&&(c=Ui(l),h.attr("x",c[0]).attr("width",c[1]-c[0]),r(f)),s&&(c=Ui(s),h.attr("y",c[0]).attr("height",c[1]-c[0]),u(f)),e(f)})}function 
e(n){n.selectAll(".resize").attr("transform",function(n){return"translate("+f[+/e$/.test(n)]+","+h[+/^s/.test(n)]+")"})}function r(n){n.select(".extent").attr("x",f[0]),n.selectAll(".extent,.n>rect,.s>rect").attr("width",f[1]-f[0])}function u(n){n.select(".extent").attr("y",h[0]),n.selectAll(".extent,.e>rect,.w>rect").attr("height",h[1]-h[0])}function i(){function i(){32==ta.event.keyCode&&(C||(M=null,q[0]-=f[1],q[1]-=h[1],C=2),S())}function v(){32==ta.event.keyCode&&2==C&&(q[0]+=f[1],q[1]+=h[1],C=0,S())}function d(){var n=ta.mouse(b),t=!1;x&&(n[0]+=x[0],n[1]+=x[1]),C||(ta.event.altKey?(M||(M=[(f[0]+f[1])/2,(h[0]+h[1])/2]),q[0]=f[+(n[0]s?(u=r,r=s):u=s),v[0]!=r||v[1]!=u?(e?a=null:o=null,v[0]=r,v[1]=u,!0):void 0}function y(){d(),k.style("pointer-events","all").selectAll(".resize").style("display",n.empty()?"none":null),ta.select("body").style("cursor",null),L.on("mousemove.brush",null).on("mouseup.brush",null).on("touchmove.brush",null).on("touchend.brush",null).on("keydown.brush",null).on("keyup.brush",null),z(),w({type:"brushend"})}var M,x,b=this,_=ta.select(ta.event.target),w=c.of(b,arguments),k=ta.select(b),E=_.datum(),A=!/^(n|s)$/.test(E)&&l,N=!/^(e|w)$/.test(E)&&s,C=_.classed("extent"),z=W(b),q=ta.mouse(b),L=ta.select(t(b)).on("keydown.brush",i).on("keyup.brush",v);if(ta.event.changedTouches?L.on("touchmove.brush",d).on("touchend.brush",y):L.on("mousemove.brush",d).on("mouseup.brush",y),k.interrupt().selectAll("*").interrupt(),C)q[0]=f[0]-q[0],q[1]=h[0]-q[1];else if(E){var T=+/w$/.test(E),R=+/^n/.test(E);x=[f[1-T]-q[0],h[1-R]-q[1]],q[0]=f[T],q[1]=h[R]}else ta.event.altKey&&(M=q.slice());k.style("pointer-events","none").selectAll(".resize").style("display",null),ta.select("body").style("cursor",_.style("cursor")),w({type:"brushstart"}),d()}var o,a,c=E(n,"brushstart","brush","brushend"),l=null,s=null,f=[0,0],h=[0,0],g=!0,p=!0,v=Ol[0];return n.event=function(n){n.each(function(){var n=c.of(this,arguments),t={x:f,y:h,i:o,j:a},e=this.__chart__||t;this.__chart__=t,Tl?ta.select(this).transition().each("start.brush",function(){o=e.i,a=e.j,f=e.x,h=e.y,n({type:"brushstart"})}).tween("brush:brush",function(){var e=yu(f,t.x),r=yu(h,t.y);return o=a=null,function(u){f=t.x=e(u),h=t.y=r(u),n({type:"brush",mode:"resize"})}}).each("end.brush",function(){o=t.i,a=t.j,n({type:"brush",mode:"resize"}),n({type:"brushend"})}):(n({type:"brushstart"}),n({type:"brush",mode:"resize"}),n({type:"brushend"}))})},n.x=function(t){return arguments.length?(l=t,v=Ol[!l<<1|!s],n):l},n.y=function(t){return arguments.length?(s=t,v=Ol[!l<<1|!s],n):s},n.clamp=function(t){return arguments.length?(l&&s?(g=!!t[0],p=!!t[1]):l?g=!!t:s&&(p=!!t),n):l&&s?[g,p]:l?g:s?p:null},n.extent=function(t){var e,r,u,i,c;return arguments.length?(l&&(e=t[0],r=t[1],s&&(e=e[0],r=r[0]),o=[e,r],l.invert&&(e=l(e),r=l(r)),e>r&&(c=e,e=r,r=c),(e!=f[0]||r!=f[1])&&(f=[e,r])),s&&(u=t[0],i=t[1],l&&(u=u[1],i=i[1]),a=[u,i],s.invert&&(u=s(u),i=s(i)),u>i&&(c=u,u=i,i=c),(u!=h[0]||i!=h[1])&&(h=[u,i])),n):(l&&(o?(e=o[0],r=o[1]):(e=f[0],r=f[1],l.invert&&(e=l.invert(e),r=l.invert(r)),e>r&&(c=e,e=r,r=c))),s&&(a?(u=a[0],i=a[1]):(u=h[0],i=h[1],s.invert&&(u=s.invert(u),i=s.invert(i)),u>i&&(c=u,u=i,i=c))),l&&s?[[e,u],[r,i]]:l?[e,r]:s&&[u,i])},n.clear=function(){return n.empty()||(f=[0,0],h=[0,0],o=a=null),n},n.empty=function(){return!!l&&f[0]==f[1]||!!s&&h[0]==h[1]},ta.rebind(n,c,"on")};var 
Hl={n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},Ol=[["n","e","s","w","nw","ne","se","sw"],["e","w"],["n","s"],[]],Il=ac.format=gc.timeFormat,Yl=Il.utc,Zl=Yl("%Y-%m-%dT%H:%M:%S.%LZ");Il.iso=Date.prototype.toISOString&&+new Date("2000-01-01T00:00:00.000Z")?Jo:Zl,Jo.parse=function(n){var t=new Date(n);return isNaN(t)?null:t},Jo.toString=Zl.toString,ac.second=Ft(function(n){return new cc(1e3*Math.floor(n/1e3))},function(n,t){n.setTime(n.getTime()+1e3*Math.floor(t))},function(n){return n.getSeconds()}),ac.seconds=ac.second.range,ac.seconds.utc=ac.second.utc.range,ac.minute=Ft(function(n){return new cc(6e4*Math.floor(n/6e4))},function(n,t){n.setTime(n.getTime()+6e4*Math.floor(t))},function(n){return n.getMinutes()}),ac.minutes=ac.minute.range,ac.minutes.utc=ac.minute.utc.range,ac.hour=Ft(function(n){var t=n.getTimezoneOffset()/60;return new cc(36e5*(Math.floor(n/36e5-t)+t))},function(n,t){n.setTime(n.getTime()+36e5*Math.floor(t))},function(n){return n.getHours()}),ac.hours=ac.hour.range,ac.hours.utc=ac.hour.utc.range,ac.month=Ft(function(n){return n=ac.day(n),n.setDate(1),n},function(n,t){n.setMonth(n.getMonth()+t)},function(n){return n.getMonth()}),ac.months=ac.month.range,ac.months.utc=ac.month.utc.range;var Vl=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],Xl=[[ac.second,1],[ac.second,5],[ac.second,15],[ac.second,30],[ac.minute,1],[ac.minute,5],[ac.minute,15],[ac.minute,30],[ac.hour,1],[ac.hour,3],[ac.hour,6],[ac.hour,12],[ac.day,1],[ac.day,2],[ac.week,1],[ac.month,1],[ac.month,3],[ac.year,1]],$l=Il.multi([[".%L",function(n){return n.getMilliseconds()}],[":%S",function(n){return n.getSeconds()}],["%I:%M",function(n){return n.getMinutes()}],["%I %p",function(n){return n.getHours()}],["%a %d",function(n){return n.getDay()&&1!=n.getDate()}],["%b %d",function(n){return 1!=n.getDate()}],["%B",function(n){return n.getMonth()}],["%Y",Ne]]),Bl={range:function(n,t,e){return ta.range(Math.ceil(n/e)*e,+t,e).map(Ko)},floor:y,ceil:y};Xl.year=ac.year,ac.scale=function(){return Go(ta.scale.linear(),Xl,$l)};var Wl=Xl.map(function(n){return[n[0].utc,n[1]]}),Jl=Yl.multi([[".%L",function(n){return n.getUTCMilliseconds()}],[":%S",function(n){return n.getUTCSeconds()}],["%I:%M",function(n){return n.getUTCMinutes()}],["%I %p",function(n){return n.getUTCHours()}],["%a %d",function(n){return n.getUTCDay()&&1!=n.getUTCDate()}],["%b %d",function(n){return 1!=n.getUTCDate()}],["%B",function(n){return n.getUTCMonth()}],["%Y",Ne]]);Wl.year=ac.year.utc,ac.scale.utc=function(){return Go(ta.scale.linear(),Wl,Jl)},ta.text=At(function(n){return n.responseText}),ta.json=function(n,t){return Nt(n,"application/json",Qo,t)},ta.html=function(n,t){return Nt(n,"text/html",na,t)},ta.xml=At(function(n){return n.responseXML}),"function"==typeof define&&define.amd?define(ta):"object"==typeof module&&module.exports&&(module.exports=ta),this.d3=ta}(); \ No newline at end of file diff --git a/public/js/jquery-1.9.0.min.js b/public/assets/jquery/jquery-1.9.0.min.js similarity index 100% rename from public/js/jquery-1.9.0.min.js rename to public/assets/jquery/jquery-1.9.0.min.js diff --git a/public/js/jquery.deserialize.min.js b/public/assets/jquery/jquery.deserialize.min.js similarity index 100% rename from public/js/jquery.deserialize.min.js rename to public/assets/jquery/jquery.deserialize.min.js diff --git a/public/css/application.css b/public/css/application.css new file mode 100644 index 
000000000..e862d3c16 --- /dev/null +++ b/public/css/application.css @@ -0,0 +1,218 @@ +.list-group-item { + border-radius: 0 !important; + background: white; + padding: 10px 10px 10px 10px; + border: 0px; +} + +.active a.list-group-item { + background-color: blue !important; +} + +/** + * Modify the current navigation bar + **/ + +.navbar-default { + background-color: #ffffff; + border-color: #e8e8e8; + font-family: sans-serif; +} + +.navbar-default .navbar-brand { + color: #6a7478; +} + +.navbar-default .navbar-brand:hover, +.navbar-default .navbar-brand:focus { + color: #0084bf; +} + +.navbar-default .navbar-text { + color: #6a7478; +} + +.navbar-default .navbar-nav > li > a { + color: #6a7478; +} + +.navbar-default .navbar-nav > li > a:hover, +.navbar-default .navbar-nav > li > a:focus { + color: #0084bf; +} + +.navbar-default .navbar-nav > .active > a, +.navbar-default .navbar-nav > .active > a:hover, +.navbar-default .navbar-nav > .active > a:focus { + color: #0084bf; + background-color: #e8e8e8; +} + +.navbar-default .navbar-nav > .open > a, +.navbar-default .navbar-nav > .open > a:hover, +.navbar-default .navbar-nav > .open > a:focus { + color: #0084bf; + background-color: #e8e8e8; +} + +.navbar-default .navbar-toggle { + border-color: #e8e8e8; +} + +.navbar-default .navbar-toggle:hover, +.navbar-default .navbar-toggle:focus { + background-color: #e8e8e8; +} + +.nav-extension { + background-color: #0084BF; + height: 120px; + border: none; + margin-top: -20px; + padding: 0px; + z-index: -1; + position: absolute; + width: 100%; +} + +.details-container { + color: #5e6365; + font-family: 'Shift', sans-serif; + background: white; + margin: 30px 10px 10px 0px; + padding: 20px 15px 30px 25px; + width: 100%; + height: 100%; +} + +.shadow { + box-shadow: 0 1px 8px -2px rgb(82, 82, 82); +} + +.box { + position: relative; + border: none; +} + +.horizontal-line { + margin: 0px 0px 5px 0px; + padding: 0px; +} + +.history-top { + width: 100%; + margin-top: 30px; +} + +.help-details { + margin-left: 30px; + margin-top: 0px; + height: 100%; +} + +.help-menu { + margin-right: 10px; + margin-top: 0px; + height: 100%; +} + +.wf-button { + background-color: white; + display: inline-block; + cursor: pointer; + border-radius: 3px; + font-size: 14px; + font-weight: 500; + border: #0084BF 1px solid; + color: #0084BF; +} + +.wf-button:hover { + background-color: #008cc9; + color: white; +} + +.graph-select { + width: 200px; + margin-right: 10px; +} + +.wasted-resource-block { + color: #C00000; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size: 13px; + width: 95px; +} + +.used-resource-block { + color: #0077B5; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size: 13px; + width: 95px; +} + +.wait-time-block { + color: #C00000; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size: 13px; + width: 95px; +} + +.run-time-block { + color: #0077B5; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size: 13px; + width: 95px; +} + +.svg-graph { + width: 100%; + height: 100%; + overflow: visible; +} + +.history-graph { + width: 1078px; + height: 342px; +} + +.loading-indicator { + position: relative; + left: 50%; + top: 40%; + display: none; +} + +.history-table-first-column { + min-width: 200px; + 
height: 35px; +} + +.history-table-metrics-column { + padding: 0 !important; + min-width: 380px; + text-align: center !important; + height: 35px; +} + +.history-table-metrics-table { + padding: 0; + margin: 0; + width: 100%; + height: 35px; +} + +.table-fixed { + table-layout: fixed +} + +.tooltip-text-heuristics { + color: black; +} + +.graph-padding { + padding-left: 25px; +} \ No newline at end of file diff --git a/public/css/datepicker.css b/public/css/datepicker.css deleted file mode 100755 index b7065b7d5..000000000 --- a/public/css/datepicker.css +++ /dev/null @@ -1,182 +0,0 @@ -/*! - * Datepicker for Bootstrap - * - * Copyright 2012 Stefan Petre - * Licensed under the Apache License v2.0 - * http://www.apache.org/licenses/LICENSE-2.0 - * - */ -.datepicker { - top: 0; - left: 0; - padding: 4px; - margin-top: 1px; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; - /*.dow { - border-top: 1px solid #ddd !important; - }*/ - -} -.datepicker:before { - content: ''; - display: inline-block; - border-left: 7px solid transparent; - border-right: 7px solid transparent; - border-bottom: 7px solid #ccc; - border-bottom-color: rgba(0, 0, 0, 0.2); - position: absolute; - top: -7px; - left: 6px; -} -.datepicker:after { - content: ''; - display: inline-block; - border-left: 6px solid transparent; - border-right: 6px solid transparent; - border-bottom: 6px solid #ffffff; - position: absolute; - top: -6px; - left: 7px; -} -.datepicker > div { - display: none; -} -.datepicker table { - width: 100%; - margin: 0; -} -.datepicker td, -.datepicker th { - text-align: center; - width: 20px; - height: 20px; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; -} -.datepicker td.day:hover { - background: #eeeeee; - cursor: pointer; -} -.datepicker td.day.disabled { - color: #eeeeee; -} -.datepicker td.old, -.datepicker td.new { - color: #999999; -} -.datepicker td.active, -.datepicker td.active:hover { - color: #ffffff; - background-color: #006dcc; - background-image: -moz-linear-gradient(top, #0088cc, #0044cc); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0044cc)); - background-image: -webkit-linear-gradient(top, #0088cc, #0044cc); - background-image: -o-linear-gradient(top, #0088cc, #0044cc); - background-image: linear-gradient(to bottom, #0088cc, #0044cc); - background-repeat: repeat-x; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0044cc', GradientType=0); - border-color: #0044cc #0044cc #002a80; - border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - *background-color: #0044cc; - /* Darken IE7 buttons by default so they stand out more given they won't have borders */ - - filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - color: #fff; - text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); -} -.datepicker td.active:hover, -.datepicker td.active:hover:hover, -.datepicker td.active:focus, -.datepicker td.active:hover:focus, -.datepicker td.active:active, -.datepicker td.active:hover:active, -.datepicker td.active.active, -.datepicker td.active:hover.active, -.datepicker td.active.disabled, -.datepicker td.active:hover.disabled, -.datepicker td.active[disabled], -.datepicker td.active:hover[disabled] { - color: #ffffff; - background-color: #0044cc; - *background-color: #003bb3; -} -.datepicker td.active:active, -.datepicker td.active:hover:active, -.datepicker td.active.active, -.datepicker td.active:hover.active { - background-color: #003399 
\9; -} -.datepicker td span { - display: block; - width: 47px; - height: 54px; - line-height: 54px; - float: left; - margin: 2px; - cursor: pointer; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; -} -.datepicker td span:hover { - background: #eeeeee; -} -.datepicker td span.active { - color: #ffffff; - background-color: #006dcc; - background-image: -moz-linear-gradient(top, #0088cc, #0044cc); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0044cc)); - background-image: -webkit-linear-gradient(top, #0088cc, #0044cc); - background-image: -o-linear-gradient(top, #0088cc, #0044cc); - background-image: linear-gradient(to bottom, #0088cc, #0044cc); - background-repeat: repeat-x; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0044cc', GradientType=0); - border-color: #0044cc #0044cc #002a80; - border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - *background-color: #0044cc; - /* Darken IE7 buttons by default so they stand out more given they won't have borders */ - - filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - color: #fff; - text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); -} -.datepicker td span.active:hover, -.datepicker td span.active:focus, -.datepicker td span.active:active, -.datepicker td span.active.active, -.datepicker td span.active.disabled, -.datepicker td span.active[disabled] { - color: #ffffff; - background-color: #0044cc; - *background-color: #003bb3; -} -.datepicker td span.active:active, -.datepicker td span.active.active { - background-color: #003399 \9; -} -.datepicker td span.old { - color: #999999; -} -.datepicker th.switch { - width: 145px; -} -.datepicker th.next, -.datepicker th.prev { - font-size: 21px; -} -.datepicker thead tr:first-child th { - cursor: pointer; -} -.datepicker thead tr:first-child th:hover { - background: #eeeeee; -} -.input-append.date .add-on i, -.input-prepend.date .add-on i { - display: block; - cursor: pointer; - width: 16px; - height: 16px; -} \ No newline at end of file diff --git a/public/css/main.css b/public/css/main.css index a5e0abeb4..e2a8618bf 100644 --- a/public/css/main.css +++ b/public/css/main.css @@ -1,15 +1,33 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + .left-table { - display:table; + display:table; } .left-table tr { - display:table-row + display:table-row; + white-space:normal; } .left-table tr td { - display: table-cell; - white-space: pre; + display: table-cell; + white-space: pre; + word-wrap:break-word; } .left-table tr td:last-child{ - width: 100%; + width: 100%; }​ @@ -66,4 +84,144 @@ a.list-group-item-danger.active:focus { .label-severe[href]:hover, .label-severe[href]:focus { background-color: #da6425; -} \ No newline at end of file +} + +.list-group-item-alternate { + position: relative; + display: block; + padding: 10px 15px; + margin-bottom: -1px; + background-color: #fff; + border: 1px solid #ddd; +} +.list-group-item-alternate:nth-child(even){ + background-color: #f2f2f2; +} + +.left{display:inline-block} +.right{display:inline-block; float:right} + +/* Graph axis */ +.axis path, .axis line { + shape-rendering: crispEdges; +} +.x.axis path { + stroke: rgb(192, 208, 224); +} +.y.axis line { + fill:#0077b5; +} +.y.axis path { + display:none; /* Hide the y axis */ +} + +/* Override bootstrap. Horizontal scrollable history table */ +body { + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; +} +.table-responsive { + overflow-x: auto; +} +.table-responsive > .table > thead > tr > th { + text-align:center; + vertical-align:top; + word-wrap:break-word; +} +.table-responsive > .table > tbody > tr > td { + text-align:left; +} +.table-responsive > .table > thead > tr > th, +.table-responsive > .table > tbody > tr > td { + overflow: hidden; + width: 120px; + padding-right:0px; +} +.table-responsive > .table > thead > tr > th > a{ + text-decoration:none; +} + +/* Tooltips for history table */ +.hasTooltip { + text-decoration:none; + position:relative; +} +.hasTooltip div { + display:none; + position:fixed; + font-size:12px; + color:#CCCCCC; + background:rgba(30, 30, 30, 0.9); + text-align: center; + border-radius:5px; + border:1px solid black; + padding:5px; + overflow:hidden; +} +.hasTooltip:hover div { + display:block; + z-index:1; +} + +.stacktrace{ + /* For Firefox */ + white-space: pre-wrap; + word-break: break-all; + + /* For Chrome and IE */ + word-wrap: break-word; + + font-size:11px; + font-family:monospace; + color:brown; +} + +.metrics-icons { + height: 20px; + width: 20px; +} + +.metrics-dashboard { + height: 30px; + margin-left: 10px; +} + +.wasted-resource-td { + background: #FFDFDF; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size:12px; +} + +.used-resource-td { + background: rgba(186, 222, 228, 0.46); + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size:12px; + + +} + +.wait-time-td { + background: #FFE8E4; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size:12px; + +} + +.run-time-td { + background: #D6E7EF; + font-family: "HelveticaNeue-Light", "Helvetica Neue Light", "Helvetica Neue", Arial Narrow, Helvetica, sans-serif; + font-size:12px; +} + + +.metrics-icons-table { + height: 16px; + width: 16px; +} + + +.graphColor { + color: white; + background: rgba(30, 30, 30, 0.9) !important; +} + + diff --git a/public/images/loading.gif b/public/images/loading.gif new file mode 100644 index 000000000..3c2f7c058 Binary files /dev/null and b/public/images/loading.gif differ diff --git a/public/images/logo.png 
b/public/images/logo.png new file mode 100644 index 000000000..a7f7b9670 Binary files /dev/null and b/public/images/logo.png differ diff --git a/public/images/runtime.png b/public/images/runtime.png new file mode 100644 index 000000000..6f98430d9 Binary files /dev/null and b/public/images/runtime.png differ diff --git a/public/images/usedmemory.png b/public/images/usedmemory.png new file mode 100644 index 000000000..c55a084a6 Binary files /dev/null and b/public/images/usedmemory.png differ diff --git a/public/images/waittime.png b/public/images/waittime.png new file mode 100644 index 000000000..fb862f961 Binary files /dev/null and b/public/images/waittime.png differ diff --git a/public/images/wastedmemory.png b/public/images/wastedmemory.png new file mode 100644 index 000000000..627bf8d9c Binary files /dev/null and b/public/images/wastedmemory.png differ diff --git a/public/js/bootstrap-datepicker.js b/public/js/bootstrap-datepicker.js deleted file mode 100755 index bf3a56df0..000000000 --- a/public/js/bootstrap-datepicker.js +++ /dev/null @@ -1,474 +0,0 @@ -/* ========================================================= - * bootstrap-datepicker.js - * http://www.eyecon.ro/bootstrap-datepicker - * ========================================================= - * Copyright 2012 Stefan Petre - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ========================================================= */ - -!function( $ ) { - - // Picker object - - var Datepicker = function(element, options){ - this.element = $(element); - this.format = DPGlobal.parseFormat(options.format||this.element.data('date-format')||'mm/dd/yyyy'); - this.picker = $(DPGlobal.template) - .appendTo('body') - .on({ - click: $.proxy(this.click, this)//, - //mousedown: $.proxy(this.mousedown, this) - }); - this.isInput = this.element.is('input'); - this.component = this.element.is('.date') ? this.element.find('.add-on') : false; - - if (this.isInput) { - this.element.on({ - focus: $.proxy(this.show, this), - //blur: $.proxy(this.hide, this), - keyup: $.proxy(this.update, this) - }); - } else { - if (this.component){ - this.component.on('click', $.proxy(this.show, this)); - } else { - this.element.on('click', $.proxy(this.show, this)); - } - } - - this.minViewMode = options.minViewMode||this.element.data('date-minviewmode')||0; - if (typeof this.minViewMode === 'string') { - switch (this.minViewMode) { - case 'months': - this.minViewMode = 1; - break; - case 'years': - this.minViewMode = 2; - break; - default: - this.minViewMode = 0; - break; - } - } - this.viewMode = options.viewMode||this.element.data('date-viewmode')||0; - if (typeof this.viewMode === 'string') { - switch (this.viewMode) { - case 'months': - this.viewMode = 1; - break; - case 'years': - this.viewMode = 2; - break; - default: - this.viewMode = 0; - break; - } - } - this.startViewMode = this.viewMode; - this.weekStart = options.weekStart||this.element.data('date-weekstart')||0; - this.weekEnd = this.weekStart === 0 ? 
6 : this.weekStart - 1; - this.onRender = options.onRender; - this.fillDow(); - this.fillMonths(); - this.update(); - this.showMode(); - }; - - Datepicker.prototype = { - constructor: Datepicker, - - show: function(e) { - this.picker.show(); - this.height = this.component ? this.component.outerHeight() : this.element.outerHeight(); - this.place(); - $(window).on('resize', $.proxy(this.place, this)); - if (e ) { - e.stopPropagation(); - e.preventDefault(); - } - if (!this.isInput) { - } - var that = this; - $(document).on('mousedown', function(ev){ - if ($(ev.target).closest('.datepicker').length == 0) { - that.hide(); - } - }); - this.element.trigger({ - type: 'show', - date: this.date - }); - }, - - hide: function(){ - this.picker.hide(); - $(window).off('resize', this.place); - this.viewMode = this.startViewMode; - this.showMode(); - if (!this.isInput) { - $(document).off('mousedown', this.hide); - } - //this.set(); - this.element.trigger({ - type: 'hide', - date: this.date - }); - }, - - set: function() { - var formated = DPGlobal.formatDate(this.date, this.format); - if (!this.isInput) { - if (this.component){ - this.element.find('input').prop('value', formated); - } - this.element.data('date', formated); - } else { - this.element.prop('value', formated); - } - }, - - setValue: function(newDate) { - if (typeof newDate === 'string') { - this.date = DPGlobal.parseDate(newDate, this.format); - } else { - this.date = new Date(newDate); - } - this.set(); - this.viewDate = new Date(this.date.getFullYear(), this.date.getMonth(), 1, 0, 0, 0, 0); - this.fill(); - }, - - place: function(){ - var offset = this.component ? this.component.offset() : this.element.offset(); - this.picker.css({ - top: offset.top + this.height, - left: offset.left - }); - }, - - update: function(newDate){ - this.date = DPGlobal.parseDate( - typeof newDate === 'string' ? newDate : (this.isInput ? 
this.element.prop('value') : this.element.data('date')), - this.format - ); - this.viewDate = new Date(this.date.getFullYear(), this.date.getMonth(), 1, 0, 0, 0, 0); - this.fill(); - }, - - fillDow: function(){ - var dowCnt = this.weekStart; - var html = ''; - while (dowCnt < this.weekStart + 7) { - html += ''+DPGlobal.dates.daysMin[(dowCnt++)%7]+''; - } - html += ''; - this.picker.find('.datepicker-days thead').append(html); - }, - - fillMonths: function(){ - var html = ''; - var i = 0 - while (i < 12) { - html += ''+DPGlobal.dates.monthsShort[i++]+''; - } - this.picker.find('.datepicker-months td').append(html); - }, - - fill: function() { - var d = new Date(this.viewDate), - year = d.getFullYear(), - month = d.getMonth(), - currentDate = this.date.valueOf(); - this.picker.find('.datepicker-days th:eq(1)') - .text(DPGlobal.dates.months[month]+' '+year); - var prevMonth = new Date(year, month-1, 28,0,0,0,0), - day = DPGlobal.getDaysInMonth(prevMonth.getFullYear(), prevMonth.getMonth()); - prevMonth.setDate(day); - prevMonth.setDate(day - (prevMonth.getDay() - this.weekStart + 7)%7); - var nextMonth = new Date(prevMonth); - nextMonth.setDate(nextMonth.getDate() + 42); - nextMonth = nextMonth.valueOf(); - var html = []; - var clsName, - prevY, - prevM; - while(prevMonth.valueOf() < nextMonth) { - if (prevMonth.getDay() === this.weekStart) { - html.push(''); - } - clsName = this.onRender(prevMonth); - prevY = prevMonth.getFullYear(); - prevM = prevMonth.getMonth(); - if ((prevM < month && prevY === year) || prevY < year) { - clsName += ' old'; - } else if ((prevM > month && prevY === year) || prevY > year) { - clsName += ' new'; - } - if (prevMonth.valueOf() === currentDate) { - clsName += ' active'; - } - html.push(''+prevMonth.getDate() + ''); - if (prevMonth.getDay() === this.weekEnd) { - html.push(''); - } - prevMonth.setDate(prevMonth.getDate()+1); - } - this.picker.find('.datepicker-days tbody').empty().append(html.join('')); - var currentYear = this.date.getFullYear(); - - var months = this.picker.find('.datepicker-months') - .find('th:eq(1)') - .text(year) - .end() - .find('span').removeClass('active'); - if (currentYear === year) { - months.eq(this.date.getMonth()).addClass('active'); - } - - html = ''; - year = parseInt(year/10, 10) * 10; - var yearCont = this.picker.find('.datepicker-years') - .find('th:eq(1)') - .text(year + '-' + (year + 9)) - .end() - .find('td'); - year -= 1; - for (var i = -1; i < 11; i++) { - html += ''+year+''; - year += 1; - } - yearCont.html(html); - }, - - click: function(e) { - e.stopPropagation(); - e.preventDefault(); - var target = $(e.target).closest('span, td, th'); - if (target.length === 1) { - switch(target[0].nodeName.toLowerCase()) { - case 'th': - switch(target[0].className) { - case 'switch': - this.showMode(1); - break; - case 'prev': - case 'next': - this.viewDate['set'+DPGlobal.modes[this.viewMode].navFnc].call( - this.viewDate, - this.viewDate['get'+DPGlobal.modes[this.viewMode].navFnc].call(this.viewDate) + - DPGlobal.modes[this.viewMode].navStep * (target[0].className === 'prev' ? 
-1 : 1) - ); - this.fill(); - this.set(); - break; - } - break; - case 'span': - if (target.is('.month')) { - var month = target.parent().find('span').index(target); - this.viewDate.setMonth(month); - } else { - var year = parseInt(target.text(), 10)||0; - this.viewDate.setFullYear(year); - } - if (this.viewMode !== 0) { - this.date = new Date(this.viewDate); - this.element.trigger({ - type: 'changeDate', - date: this.date, - viewMode: DPGlobal.modes[this.viewMode].clsName - }); - } - this.showMode(-1); - this.fill(); - this.set(); - break; - case 'td': - if (target.is('.day') && !target.is('.disabled')){ - var day = parseInt(target.text(), 10)||1; - var month = this.viewDate.getMonth(); - if (target.is('.old')) { - month -= 1; - } else if (target.is('.new')) { - month += 1; - } - var year = this.viewDate.getFullYear(); - this.date = new Date(year, month, day,0,0,0,0); - this.viewDate = new Date(year, month, Math.min(28, day),0,0,0,0); - this.fill(); - this.set(); - this.element.trigger({ - type: 'changeDate', - date: this.date, - viewMode: DPGlobal.modes[this.viewMode].clsName - }); - } - break; - } - } - }, - - mousedown: function(e){ - e.stopPropagation(); - e.preventDefault(); - }, - - showMode: function(dir) { - if (dir) { - this.viewMode = Math.max(this.minViewMode, Math.min(2, this.viewMode + dir)); - } - this.picker.find('>div').hide().filter('.datepicker-'+DPGlobal.modes[this.viewMode].clsName).show(); - } - }; - - $.fn.datepicker = function ( option, val ) { - return this.each(function () { - var $this = $(this), - data = $this.data('datepicker'), - options = typeof option === 'object' && option; - if (!data) { - $this.data('datepicker', (data = new Datepicker(this, $.extend({}, $.fn.datepicker.defaults,options)))); - } - if (typeof option === 'string') data[option](val); - }); - }; - - $.fn.datepicker.defaults = { - onRender: function(date) { - return ''; - } - }; - $.fn.datepicker.Constructor = Datepicker; - - var DPGlobal = { - modes: [ - { - clsName: 'days', - navFnc: 'Month', - navStep: 1 - }, - { - clsName: 'months', - navFnc: 'FullYear', - navStep: 1 - }, - { - clsName: 'years', - navFnc: 'FullYear', - navStep: 10 - }], - dates:{ - days: ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"], - daysShort: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"], - daysMin: ["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa", "Su"], - months: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"], - monthsShort: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] - }, - isLeapYear: function (year) { - return (((year % 4 === 0) && (year % 100 !== 0)) || (year % 400 === 0)) - }, - getDaysInMonth: function (year, month) { - return [31, (DPGlobal.isLeapYear(year) ? 
29 : 28), 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month] - }, - parseFormat: function(format){ - var separator = format.match(/[.\/\-\s].*?/), - parts = format.split(/\W+/); - if (!separator || !parts || parts.length === 0){ - throw new Error("Invalid date format."); - } - return {separator: separator, parts: parts}; - }, - parseDate: function(date, format) { - var parts = date.split(format.separator), - date = new Date(), - val; - date.setHours(0); - date.setMinutes(0); - date.setSeconds(0); - date.setMilliseconds(0); - if (parts.length === format.parts.length) { - var year = date.getFullYear(), day = date.getDate(), month = date.getMonth(); - for (var i=0, cnt = format.parts.length; i < cnt; i++) { - val = parseInt(parts[i], 10)||1; - switch(format.parts[i]) { - case 'dd': - case 'd': - day = val; - date.setDate(val); - break; - case 'mm': - case 'm': - month = val - 1; - date.setMonth(val - 1); - break; - case 'yy': - year = 2000 + val; - date.setFullYear(2000 + val); - break; - case 'yyyy': - year = val; - date.setFullYear(val); - break; - } - } - date = new Date(year, month, day, 0 ,0 ,0); - } - return date; - }, - formatDate: function(date, format){ - var val = { - d: date.getDate(), - m: date.getMonth() + 1, - yy: date.getFullYear().toString().substring(2), - yyyy: date.getFullYear() - }; - val.dd = (val.d < 10 ? '0' : '') + val.d; - val.mm = (val.m < 10 ? '0' : '') + val.m; - var date = []; - for (var i=0, cnt = format.parts.length; i < cnt; i++) { - date.push(val[format.parts[i]]); - } - return date.join(format.separator); - }, - headTemplate: ''+ - ''+ - '‹'+ - ''+ - '›'+ - ''+ - '', - contTemplate: '' - }; - DPGlobal.template = ''; - -}( window.jQuery ); \ No newline at end of file diff --git a/public/js/flowhistoryform.js b/public/js/flowhistoryform.js new file mode 100644 index 000000000..06aefd7b7 --- /dev/null +++ b/public/js/flowhistoryform.js @@ -0,0 +1,134 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +$(document).ready(function(){ + + /* Plot graph for data obtained from ajax call */ + $.getJSON('/rest/flowgraphdata?id=' + queryString()['flow-def-id'], function(data) { + updateExecTimezone(data); + + // Compute the jobDefId list such that the job numbers in the tooltip match the corresponding job in the table. 
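+ // Illustrative note on the dedup below (example URLs are made up): the loop
+ // walks the records back-to-front and appends each jobdefurl the first time
+ // it is seen, so if the scans encounter urls B, A, B, C in that order,
+ // jobDefList becomes [B, A, C] and C is labelled "Job 3"
+ // (jobDefList.indexOf(url) + 1) in both the tooltip and the table.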
+ var jobDefList = []; + for (var i = data.length - 1 ; i >= 0 ; i--) { + for (var j = 0; j < data[i].jobscores.length; j++) { + var jobDefUrl = data[i].jobscores[j]["jobdefurl"]; + if (jobDefList.indexOf(jobDefUrl) == -1) { + jobDefList.push(jobDefUrl); + } + } + } + + plotter(data, jobDefList); + }); + + loadTableTooltips(); +}); + +/** + * Example tooltip content: + * + * Sat Oct 17 2015 01:47:59 GMT+0530 (IST) + * Flow Score = 163672 + * Top poor jobs + * Job 25 45% + * Job 16 20% + * Job 14 10% + * + */ +function getGraphTooltipContent(record, jobDefList) { + + var content = document.createElement("div"); + content.style.textAlign = "center"; + + var heading = document.createElement("b"); + heading.appendChild(document.createTextNode(record.flowtime)); + heading.appendChild(document.createElement("br")); + + var details = document.createElement("p"); + details.appendChild(document.createTextNode("Flow Score = " + record.score)); + + var jobTable = document.createElement("table"); + if (record.score != 0) { + var jobLimit = 3; + details.appendChild(document.createElement("br")); + + var tableHeader = document.createElement("th"); + tableHeader.setAttribute("colspan", "2"); + tableHeader.style.padding = "3px"; + tableHeader.style.textAlign = "center"; + tableHeader.appendChild(document.createTextNode("Score Distribution")); + jobTable.appendChild(tableHeader); + + var scoreList = []; + for (var i = 0; i < record.jobscores.length; i++) { + var scoreWidth = record.jobscores[i]["jobscore"] * 100 / record.score; + scoreList.push([scoreWidth, i]); + } + + scoreList.sort(function(left, right) { + return left[0] > right[0] ? -1 : 1; + }); + + // Traverse ordered list + for (var jobIndex = 0; jobIndex < scoreList.length; jobIndex++) { + + var width = scoreList[jobIndex][0]; + var index = scoreList[jobIndex][1]; + + // Skip after jobLimit jobs are captured or when width is 0.
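+ // (scoreList is sorted by descending width, so breaking here is safe:
+ // every remaining entry is no wider. With illustrative widths
+ // [45, 20, 10, 0.5, ...] exactly three rows are rendered.)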
+ if (jobIndex >= jobLimit || width == 0) { + break; + } + + var jobDefUrl = record.jobscores[index]['jobdefurl']; + //var jobLink = "/jobhistory?job-def-id=" + encodeURIComponent(jobDefUrl); + var jobExecUrl = record.jobscores[index]['jobexecurl']; + var jobRef = document.createElement("a"); + jobRef.setAttribute("href", jobExecUrl); + jobRef.appendChild(document.createTextNode("Job " + (jobDefList.indexOf(jobDefUrl) + 1))); + + var tableCell1 = document.createElement("td"); + tableCell1.style.padding = "3px"; + tableCell1.style.border = "none"; + tableCell1.setAttribute("width", "65px"); + tableCell1.appendChild(jobRef); + + var jobScoreRect = document.createElement("div"); + jobScoreRect.style.padding = "3px"; + jobScoreRect.style.background = "red"; + jobScoreRect.style.width = width + "%"; + jobScoreRect.appendChild(document.createTextNode(+width.toFixed(2) + "%")); + + var tableCell2 = document.createElement("td"); + tableCell2.style.border = "none"; + tableCell2.appendChild(jobScoreRect); + + var tableRow = document.createElement("tr"); + tableRow.appendChild(tableCell1); + tableRow.appendChild(tableCell2); + + jobTable.appendChild(tableRow); + } + + jobTable.setAttribute("border", "2px solid black"); + jobTable.style.width = "100%"; + } + + content.appendChild(heading); + content.appendChild(details); + content.appendChild(jobTable); + return content; +} \ No newline at end of file diff --git a/public/js/flowresourcehistoryform.js b/public/js/flowresourcehistoryform.js new file mode 100644 index 000000000..d1f4b397f --- /dev/null +++ b/public/js/flowresourcehistoryform.js @@ -0,0 +1,38 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +$(document).ready(function(){ + + /* Plot graph for data obtained from ajax call */ + $.getJSON('/rest/flowmetricsgraphdata?id=' + queryString()['flow-def-id'], function(data) { + updateExecTimezone(data); + + // Compute the jobDefId list such that the job numbers in the tooltip match the corresponding job in the table. + var jobDefList = []; + for (var i = data.length - 1 ; i >=0 ; i--) { + for (var j = 0; j < data[i].jobmetrics.length; j++) { + var jobDefUrl = data[i].jobmetrics[j]["jobdefurl"]; + if (jobDefList.indexOf(jobDefUrl) == -1) { + jobDefList.push(jobDefUrl); + } + } + } + + plotter(data, jobDefList); + }); + + loadTableTooltips(); +}); diff --git a/public/js/flowtimehistoryform.js b/public/js/flowtimehistoryform.js new file mode 100644 index 000000000..9c6b9fc6f --- /dev/null +++ b/public/js/flowtimehistoryform.js @@ -0,0 +1,39 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +$(document).ready(function(){ + + /* Plot graph for data obtained from ajax call */ + $.getJSON('/rest/flowmetricsgraphdata?id=' + queryString()['flow-def-id'], function(data) { + updateExecTimezone(data); + + // Compute the jobDefId list such that the job numbers in the tooltip match the corresponding job in the table. + var jobDefList = []; + for (var i = data.length - 1 ; i >=0 ; i--) { + for (var j = 0; j < data[i].jobmetrics.length; j++) { + var jobDefUrl = data[i].jobmetrics[j]["jobdefurl"]; + if (jobDefList.indexOf(jobDefUrl) == -1) { + jobDefList.push(jobDefUrl); + } + } + } + + plotter(data, jobDefList); + }); + + loadTableTooltips(); +}); + diff --git a/public/js/graphresourcesmetricsutility.js b/public/js/graphresourcesmetricsutility.js new file mode 100644 index 000000000..162255f3b --- /dev/null +++ b/public/js/graphresourcesmetricsutility.js @@ -0,0 +1,329 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/* Show loading sign during ajax call */ +$(document).ajaxStart(function() { + $("#loading-indicator").show(); +}); + +$(document).ajaxStop(function() { + $("#loading-indicator").hide(); +}); + +/* Plot the performance graph for the data */ +function plotter(graphData, jobDefList) { + + graphData.forEach(function(d) { d.flowtime = new Date(d.flowtime); }); + + var graphContainer = d3.select("#visualisation"); + + /////////// DEFINE THE GRAPH ATTRIBUTES ///////////// + + // Define the Margins for the GRAPH Dimensions + var MARGINS = {top: 50, right: 50, bottom: 100, left: 50}, + WIDTH = graphContainer.style("width").replace("px", ""), + HEIGHT = graphContainer.style("height").replace("px", ""), + GRAPH_WIDTH = WIDTH - MARGINS.left - MARGINS.right, + GRAPH_HEIGHT = HEIGHT - MARGINS.top - MARGINS.bottom; + + // Set the domain of x + var millisDay = 86400000; // Offset to the domain. Also makes a single execution to be at the center. 
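+ // That is, the x-domain below is [min(flowtime) - 12h, max(flowtime) + 12h];
+ // with a single record min == max, so the half-day pad on each side places
+ // the lone point mid-axis instead of on the edge.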
+ var xRange = d3.time.scale().range([MARGINS.left, MARGINS.left + GRAPH_WIDTH]) + .domain([ + d3.min(graphData, function (d) { return Math.min(d.flowtime) - millisDay/2}), + d3.max(graphData, function (d) { return Math.max(d.flowtime) + millisDay/2}) + ]); + + // Set the domain of y + var yRange = d3.scale.linear().range([MARGINS.top + GRAPH_HEIGHT, MARGINS.top]) + .domain([0, d3.max(graphData, function (d) { return d.resourceused + d.resourceused/5; })]) + .nice(5); // Ensures a nice round value at the end of y axis + + // The graph function + var lineFunc = d3.svg.line() + .x(function (d) { return xRange(d.flowtime); }) + .y(function (d) { return yRange(d.resourceused); }) + .interpolate('linear'); + + + + var lineWastedFunc = d3.svg.line() + .x(function (d) { return xRange(d.flowtime); }) + .y(function (d) { return yRange(d.resourcewasted); }) + .interpolate('linear'); + + /* + var customTimeFormat = d3.time.format.multi([ + [".%L", function(d) { return d.getMilliseconds(); }], + [":%S", function(d) { return d.getSeconds(); }], + ["%I:%M", function(d) { return d.getMinutes(); }], + ["%I %p", function(d) { return d.getHours(); }], + ["%a %d", function(d) { return d.getDay() && d.getDate() != 1; }], + ["%b %d", function(d) { return d.getDate() != 1; }], + ["%B", function(d) { return d.getMonth(); }], + ["%Y", function() { return true; }] + ]); + */ + + var customTimeFormat = d3.time.format("%Y-%b-%d"); + + // x-axis definition + var xAxis = d3.svg.axis() + .scale(xRange) + .tickSize(0) + .orient("bottom") + .ticks(9) + .tickFormat(customTimeFormat); + + // y-axis definition + var yAxis = d3.svg.axis() + .scale(yRange) + //.tickFormat(function(d) { return d +"GB Hours"}) + //.tickSize(-1 * (GRAPH_WIDTH)) // Adds horizontal lines in the graph + .ticks(5) // Set 5 levels (5 horizontal lines) + .tickFormat(function(d) { + if((d/(1024*3600))>100.0) { + return d3.round(d/(1024*3600),0); // convert to GB Hours with 0 decimal places for large numbers + } else { + return d3.round(d/(1024*3600),2); // convert to GB Hours with 2 decimal places for small numbers + } + }) + .orient("left"); + + /////////// ADD CONTENTS TO THE GRAPH CONTAINER ///////////// + + // add the x axis + graphContainer.append("svg:g") + .attr("class", "x axis") + .attr("transform", "translate(0," + (HEIGHT - MARGINS.bottom) + ")") + .call(xAxis) + .selectAll("text") + .style("text-anchor","end") + .attr("dx", "-.8em") + .attr("dy", ".15em") + .attr("transform","rotate(-65)"); + + // Add the y-axis + graphContainer.append("svg:g") + .attr("class", "y axis") + .attr("transform", "translate(" + (MARGINS.left) + ", 0)") + .call(yAxis) + .selectAll("text") + .attr("fill", "rgb(0, 119, 181)"); + + // Add label for the y axis + graphContainer.append("svg:text") + .style("font-size", "16px") + .style("fill", "#606060") + .attr("transform", "translate(" + (MARGINS.left/10) + ", " + MARGINS.top/2 + ")") + .text("Resources(GB Hours)"); + + // Add the graph function + graphContainer.append("svg:path") + .attr("d", lineFunc(graphData)) + .attr("stroke", "#0077b5") + .attr("stroke-width", 1.5) + .attr("fill", "none"); + + graphContainer.append("svg:path") + .attr("d", lineWastedFunc(graphData)) + .attr("stroke", "#FF0000") + .attr("stroke-width", 1.5) + .attr("fill", "none"); + + + // Add the small rectangles to specify the graph meaning + graphContainer.append("rect") + .attr("x", GRAPH_WIDTH - 18) + .attr("width", 14) + .attr("height", 14) + .style("fill", "#0077b5" ); + + graphContainer.append("text") + .attr("x", GRAPH_WIDTH - 26) + 
.attr("y", 9) + .attr("dy", ".30em") + .style("text-anchor", "end") + .text(function(d) { return "Used Resources" }); + + graphContainer.append("rect") + .attr("x", GRAPH_WIDTH - 18) + .attr("y", 20) + .attr("width", 14) + .attr("height", 14) + .style("fill", "#FF0000" ); + + graphContainer.append("text") + .attr("x", GRAPH_WIDTH - 26) + .attr("y", 29) + .attr("dy", ".30em") + .style("text-anchor", "end") + .text(function(d) { return "Wasted Resources" }); + + + // Add the small bubble dots on the graph line + graphContainer.append("svg:g") + .selectAll("scatter-dots") + .data(graphData) + .enter().append("svg:circle") + .style({stroke: 'white', fill: '#0077b5'}) + .attr("cx", function (d) { return xRange(d.flowtime); } ) + .attr("cy", function (d) { return yRange(d.resourceused); } ) + .attr("r", 4); + + graphContainer.append("svg:g") + .selectAll("scatter-dots") + .data(graphData) + .enter().append("svg:circle") + .style({stroke: 'white', fill: '#FF0000'}) + .attr("cx", function (d) { return xRange(d.flowtime); } ) + .attr("cy", function (d) { return yRange(d.resourcewasted); } ) + .attr("r", 4); + + /////////// THE TOOLTIPS FOR THE GRAPH ///////////// + + // Add a transparent rectangle on top of the graph area to compute x-value mouse over + graphContainer.append("svg:rect") + .attr("class", "overlay") + .attr("width", GRAPH_WIDTH) + .attr("height", GRAPH_HEIGHT) + .attr("transform", "translate(" + (MARGINS.left) + ", " + (MARGINS.top) + ")") + .attr("opacity", 0) + .on("mouseover", function() { tooltip.style("display", null); }) // Reset tooltip display (default value) + .on("mousemove", mousemove); // Compute position and show the tooltip + + // The tooltip container (Top of the stack) + var tooltip = graphContainer.append("svg:g"); + + // Add the highlight bubble + var highlightCircleRad = 7; + tooltip.append("svg:circle") + .attr("stroke", "white") + .attr("fill", "#0077b5") + .attr("r", highlightCircleRad) + .style("display", "none"); + + tooltip.append("svg:circle") + .attr("stroke", "white") + .attr("fill", "#FF0000") + .attr("r", highlightCircleRad) + .style("display", "none").attr("class","bluecircle"); + + // Add the tooltip + var tooltipWidth = 260; + tooltip.append("foreignObject") + .attr("width", tooltipWidth + "px") + .append("xhtml:body") + .attr("id", "graph_tooltip") + .style("font-size", "12px") + .attr("class","graphColor") + .style("text-align", "center") + .style("border-radius", "5px") + .style("padding", "5px") + .style("border", "1.5px solid black"); + + var bisectExec = d3.bisector(function(d) { return d.flowtime; }).left; + + function mousemove(d) { + + // Compute tooltip to be shown depending on mouse position + var record; + if (graphData.length == 1) { + record = graphData[0]; + } else { + var xValueMouse = xRange.invert(MARGINS.left + d3.mouse(this)[0]), + index = bisectExec(graphData, xValueMouse, 1), + dleft = graphData[index - 1], + dright = graphData[index]; + record = xValueMouse - dleft.flowtime > dright.flowtime - xValueMouse ? 
dright : dleft; + } + + // Add content to tooltip + var graphTooltip = document.getElementById("graph_tooltip"); + graphTooltip.innerHTML = ''; + graphTooltip.appendChild(getGraphTooltipContent(record, jobDefList)); + + // Set position of highlighted circle + tooltip.select("circle") + .style("display", "inline") + .attr("transform", "translate(" + xRange(record.flowtime) + "," + yRange(record.resourceused) +")"); + + tooltip.select("circle.bluecircle") + .style("display", "inline") + .attr("transform", "translate(" + xRange(record.flowtime) + "," + yRange(record.resourcewasted) +")"); + + // Set position of tooltip. + var x = xRange(record.flowtime) - (tooltipWidth) - 10; + var y = yRange(record.resourceused) - tooltip.select("body").style("height").replace("px", "")/2; + + // Don't let the tooltip cross the left margin + if (x < MARGINS.left) { + x = xRange(record.flowtime) + 10; + } + + // Don't let the tooltip cross the bottom margin + if ((yRange(record.resourceused) + tooltip.select("body").style("height").replace("px", "")/2) >= yRange(0)) { + y = yRange(record.resourceused) - tooltip.select("body").style("height").replace("px", "") - 10; + } + + tooltip.select("foreignObject") + .attr("height", tooltip.select("body").style("height")); + tooltip.select("foreignObject") + .transition() + .duration(75) + .attr("transform", "translate(" + x + "," + y + ")"); + } +} + +/* Return the query parameters */ +function queryString() { + + var query_string = {}; + var query = window.location.search.substring(1); // Returns the query parameters excluding ? + var vars = query.split("&"); + + for (var i = 0; i < vars.length; i++) { + var pair = vars[i].split("="); + if (typeof query_string[pair[0]] === "undefined") { + query_string[pair[0]] = pair[1]; + } + } + return query_string; +} + +/* Update tooltip position on mouse-move over table */ +function loadTableTooltips() { + + var tooltipDiv = document.querySelectorAll('.hasTooltip div'); + window.onmousemove = function (e) { + var x = e.clientX, + y = e.clientY; + + for (var i = 0; i < tooltipDiv.length; i++) { + tooltipDiv[i].style.top = (y - tooltipDiv[i].offsetHeight - 10)+ "px"; + tooltipDiv[i].style.left = (x + 10) + "px"; + } + }; +} + +/* Update execution table with time in user timezone */ +function updateExecTimezone(data) { + var parse = d3.time.format("%b %d, %Y %I:%M %p"); + var time = document.querySelectorAll('.exectime'); + for (var i = time.length - 1; i >= 0; i--) { + time[i].innerHTML = parse(new Date(data[time.length - 1 - i].flowtime)); + } +} \ No newline at end of file diff --git a/public/js/graphtimemetricsutility.js b/public/js/graphtimemetricsutility.js new file mode 100644 index 000000000..cba252471 --- /dev/null +++ b/public/js/graphtimemetricsutility.js @@ -0,0 +1,325 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +/* Show loading sign during ajax call */ +$(document).ajaxStart(function() { + $("#loading-indicator").show(); +}); + +$(document).ajaxStop(function() { + $("#loading-indicator").hide(); +}); + +/* Plot the performance graph for the data */ +function plotter(graphData, jobDefList) { + + graphData.forEach(function(d) { d.flowtime = new Date(d.flowtime); }); + + var graphContainer = d3.select("#visualisation"); + + /////////// DEFINE THE GRAPH ATTRIBUTES ///////////// + + // Define the Margins for the GRAPH Dimensions + var MARGINS = {top: 50, right: 50, bottom: 100, left: 50}, + WIDTH = graphContainer.style("width").replace("px", ""), + HEIGHT = graphContainer.style("height").replace("px", ""), + GRAPH_WIDTH = WIDTH - MARGINS.left - MARGINS.right, + GRAPH_HEIGHT = HEIGHT - MARGINS.top - MARGINS.bottom; + + // Set the domain of x + var millisDay = 86400000; // Offset to the domain. Also makes a single execution to be at the center. + var xRange = d3.time.scale().range([MARGINS.left, MARGINS.left + GRAPH_WIDTH]) + .domain([ + d3.min(graphData, function (d) { return Math.min(d.flowtime) - millisDay/2}), + d3.max(graphData, function (d) { return Math.max(d.flowtime) + millisDay/2}) + ]); + + // Set the domain of y + var yRange = d3.scale.linear().range([MARGINS.top + GRAPH_HEIGHT, MARGINS.top]) + .domain([0, d3.max(graphData, function (d) { return d.runtime + d.runtime/5; })]) + .nice(5); // Ensures a nice round value at the end of y axis + + // The graph function + var lineFunc = d3.svg.line() + .x(function (d) { return xRange(d.flowtime); }) + .y(function (d) { return yRange(d.runtime); }) + .interpolate('linear'); + + + + var lineWastedFunc = d3.svg.line() + .x(function (d) { return xRange(d.flowtime); }) + .y(function (d) { return yRange(d.waittime); }) + .interpolate('linear'); + + /* + var customTimeFormat = d3.time.format.multi([ + [".%L", function(d) { return d.getMilliseconds(); }], + [":%S", function(d) { return d.getSeconds(); }], + ["%I:%M", function(d) { return d.getMinutes(); }], + ["%I %p", function(d) { return d.getHours(); }], + ["%a %d", function(d) { return d.getDay() && d.getDate() != 1; }], + ["%b %d", function(d) { return d.getDate() != 1; }], + ["%B", function(d) { return d.getMonth(); }], + ["%Y", function() { return true; }] + ]); + */ + + var customTimeFormat = d3.time.format("%Y-%b-%d"); + + // x-axis definition + var xAxis = d3.svg.axis() + .scale(xRange) + .tickSize(0) + .orient("bottom") + .ticks(9) + .tickFormat(customTimeFormat); + + // y-axis definition + var yAxis = d3.svg.axis() + .scale(yRange) + //.tickSize(-1 * (GRAPH_WIDTH)) // Adds horizontal lines in the graph + .ticks(5) // Set 5 levels (5 horizontal lines) + .tickFormat(function(d) { + var minutes = Math.floor(d / 60000); + var seconds = ((d % 60000) / 1000).toFixed(0); + return minutes + ":" + (seconds < 10 ? 
'0' : '') + seconds; + }) + .orient("left"); + + /////////// ADD CONTENTS TO THE GRAPH CONTAINER ///////////// + + // Add the x-axis + graphContainer.append("svg:g") + .attr("class", "x axis") + .attr("transform", "translate(0," + (HEIGHT - MARGINS.bottom) + ")") + .call(xAxis) + .selectAll("text") + .style("text-anchor","end") + .attr("dx", "-.8em") + .attr("dy", ".15em") + .attr("transform","rotate(-65)"); + + // Add the y-axis + graphContainer.append("svg:g") + .attr("class", "y axis") + .attr("transform", "translate(" + (MARGINS.left) + ", 0)") + .call(yAxis) + .selectAll("text") + .attr("fill", "rgb(0, 119, 181)"); + + // Add label for the y axis + graphContainer.append("svg:text") + .style("font-size", "16px") + .style("fill", "#606060") + .attr("transform", "translate(" + (MARGINS.left/10) + ", " + MARGINS.top/2 + ")") + .text("Run Time (mm:ss)"); + + // Add the graph function + graphContainer.append("svg:path") + .attr("d", lineFunc(graphData)) + .attr("stroke", "#0077b5") + .attr("stroke-width", 1.5) + .attr("fill", "none"); + + graphContainer.append("svg:path") + .attr("d", lineWastedFunc(graphData)) + .attr("stroke", "#FF0000") + .attr("stroke-width", 1.5) + .attr("fill", "none"); + + // Add the small bubble dots on the graph line + graphContainer.append("svg:g") + .selectAll("scatter-dots") + .data(graphData) + .enter().append("svg:circle") + .style({stroke: 'white', fill: '#0077b5'}) + .attr("cx", function (d) { return xRange(d.flowtime); } ) + .attr("cy", function (d) { return yRange(d.runtime); } ) + .attr("r", 4); + + graphContainer.append("svg:g") + .selectAll("scatter-dots") + .data(graphData) + .enter().append("svg:circle") + .style({stroke: 'white', fill: '#FF0000'}) + .attr("cx", function (d) { return xRange(d.flowtime); } ) + .attr("cy", function (d) { return yRange(d.waittime); } ) + .attr("r", 4); + + /////////// THE TOOLTIPS FOR THE GRAPH ///////////// + + // Add a transparent rectangle on top of the graph area to compute x-value mouse over + graphContainer.append("svg:rect") + .attr("class", "overlay") + .attr("width", GRAPH_WIDTH) + .attr("height", GRAPH_HEIGHT) + .attr("transform", "translate(" + (MARGINS.left) + ", " + (MARGINS.top) + ")") + .attr("opacity", 0) + .on("mouseover", function() { tooltip.style("display", null); }) // Reset tooltip display (default value) + .on("mousemove", mousemove); // Compute position and show the tooltip + + // The tooltip container (Top of the stack) + var tooltip = graphContainer.append("svg:g"); + + + // Add the small rectangles to specify the graph meaning + graphContainer.append("rect") + .attr("x", GRAPH_WIDTH - 18) + .attr("width", 14) + .attr("height", 14) + .style("fill", "#0077b5" ); + + graphContainer.append("text") + .attr("x", GRAPH_WIDTH - 26) + .attr("y", 9) + .attr("dy", ".30em") + .style("text-anchor", "end") + .text(function(d) { return "Run Time" }); + + graphContainer.append("rect") + .attr("x", GRAPH_WIDTH - 18) + .attr("y", 20) + .attr("width", 14) + .attr("height", 14) + .style("fill", "#FF0000" ); + + graphContainer.append("text") + .attr("x", GRAPH_WIDTH - 26) + .attr("y", 29) + .attr("dy", ".30em") + .style("text-anchor", "end") + .text(function(d) { return "Wait Time" }); + + // Add the highlight bubble + var highlightCircleRad = 7; + tooltip.append("svg:circle") + .attr("stroke", "white") + .attr("fill", "#0077b5") + .attr("r", highlightCircleRad) + .style("display", "none"); + + tooltip.append("svg:circle") + .attr("stroke", "white") + .attr("fill", "#FF0000") + .attr("r", highlightCircleRad) + 
.style("display", "none").attr("class","bluecircle"); + + // Add the tooltip + var tooltipWidth = 260; + tooltip.append("foreignObject") + .attr("width", tooltipWidth + "px") + .append("xhtml:body") + .attr("id", "graph_tooltip") + .style("font-size", "12px") + .attr("class","graphColor") + .style("text-align", "center") + .style("border-radius", "5px") + .style("padding", "5px") + .style("border", "1.5px solid black"); + + var bisectExec = d3.bisector(function(d) { return d.flowtime; }).left; + + function mousemove(d) { + + // Compute tooltip to be shown depending on mouse position + var record; + if (graphData.length == 1) { + record = graphData[0]; + } else { + var xValueMouse = xRange.invert(MARGINS.left + d3.mouse(this)[0]), + index = bisectExec(graphData, xValueMouse, 1), + dleft = graphData[index - 1], + dright = graphData[index]; + record = xValueMouse - dleft.flowtime > dright.flowtime - xValueMouse ? dright : dleft; + } + + // Add content to tooltip + var graphTooltip = document.getElementById("graph_tooltip"); + graphTooltip.innerHTML = ''; + graphTooltip.appendChild(getGraphTooltipContent(record, jobDefList)); + + // Set position of highlighted circle + tooltip.select("circle") + .style("display", "inline") + .attr("transform", "translate(" + xRange(record.flowtime) + "," + yRange(record.runtime) +")"); + + tooltip.select("circle.bluecircle") + .style("display", "inline") + .attr("transform", "translate(" + xRange(record.flowtime) + "," + yRange(record.waittime) +")"); + + // Set position of tooltip. + var x = xRange(record.flowtime) - (tooltipWidth) - 10; + var y = yRange(record.runtime) - tooltip.select("body").style("height").replace("px", "")/2; + + // Don't let the tooltip cross the left margin + if (x < MARGINS.left) { + x = xRange(record.flowtime) + 10; + } + + // Don't let the tooltip cross the bottom margin + if ((yRange(record.runtime) + tooltip.select("body").style("height").replace("px", "")/2) >= yRange(0)) { + y = yRange(record.runtime) - tooltip.select("body").style("height").replace("px", "") - 10; + } + + tooltip.select("foreignObject") + .attr("height", tooltip.select("body").style("height")); + tooltip.select("foreignObject") + .transition() + .duration(75) + .attr("transform", "translate(" + x + "," + y + ")"); + } +} + +/* Return the query parameters */ +function queryString() { + + var query_string = {}; + var query = window.location.search.substring(1); // Returns the query parameters excluding ? 
+ var vars = query.split("&"); + + for (var i = 0; i < vars.length; i++) { + var pair = vars[i].split("="); + if (typeof query_string[pair[0]] === "undefined") { + query_string[pair[0]] = pair[1]; + } + } + return query_string; +} + +/* Update tooltip position on mouse-move over table */ +function loadTableTooltips() { + + var tooltipDiv = document.querySelectorAll('.hasTooltip div'); + window.onmousemove = function (e) { + var x = e.clientX, + y = e.clientY; + + for (var i = 0; i < tooltipDiv.length; i++) { + tooltipDiv[i].style.top = (y - tooltipDiv[i].offsetHeight - 10)+ "px"; + tooltipDiv[i].style.left = (x + 10) + "px"; + } + }; +} + +/* Update execution table with time in user timezone */ +function updateExecTimezone(data) { + var parse = d3.time.format("%b %d, %Y %I:%M %p"); + var time = document.querySelectorAll('.exectime'); + for (var i = time.length - 1; i >= 0; i--) { + time[i].innerHTML = parse(new Date(data[time.length - 1 - i].flowtime)); + } +} \ No newline at end of file diff --git a/public/js/graphutility.js b/public/js/graphutility.js new file mode 100644 index 000000000..bc64b3b84 --- /dev/null +++ b/public/js/graphutility.js @@ -0,0 +1,261 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/* Show loading sign during ajax call */ +$(document).ajaxStart(function() { + $("#loading-indicator").show(); +}); + +$(document).ajaxStop(function() { + $("#loading-indicator").hide(); +}); + +/* Plot the performance graph for the data */ +function plotter(graphData, jobDefList) { + + graphData.forEach(function(d) { d.flowtime = new Date(d.flowtime); }); + + var graphContainer = d3.select("#visualisation"); + + /////////// DEFINE THE GRAPH ATTRIBUTES ///////////// + + // Define the Margins for the GRAPH Dimensions + var MARGINS = {top: 50, right: 50, bottom: 100, left: 50}, + WIDTH = graphContainer.style("width").replace("px", ""), + HEIGHT = graphContainer.style("height").replace("px", ""), + GRAPH_WIDTH = WIDTH - MARGINS.left - MARGINS.right, + GRAPH_HEIGHT = HEIGHT - MARGINS.top - MARGINS.bottom; + + // Set the domain of x + var millisDay = 86400000; // Offset to the domain. Also makes a single execution to be at the center. 
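+ // Note: Math.min(d.flowtime) and Math.max(d.flowtime) below take a single
+ // argument each, so they simply coerce the Date to its millisecond value
+ // (equivalent to d.flowtime.getTime()) before the half-day pad is applied.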
+ var xRange = d3.time.scale().range([MARGINS.left, MARGINS.left + GRAPH_WIDTH]) + .domain([ + d3.min(graphData, function (d) { return Math.min(d.flowtime) - millisDay/2}), + d3.max(graphData, function (d) { return Math.max(d.flowtime) + millisDay/2}) + ]); + + // Set the domain of y + var yRange = d3.scale.linear().range([MARGINS.top + GRAPH_HEIGHT, MARGINS.top]) + .domain([0, d3.max(graphData, function (d) { return d.score + d.score/5; })]) + .nice(5); // Ensures a nice round value at the end of y axis + + // The graph function + var lineFunc = d3.svg.line() + .x(function (d) { return xRange(d.flowtime); }) + .y(function (d) { return yRange(d.score); }) + .interpolate('linear'); + + /* + var customTimeFormat = d3.time.format.multi([ + [".%L", function(d) { return d.getMilliseconds(); }], + [":%S", function(d) { return d.getSeconds(); }], + ["%I:%M", function(d) { return d.getMinutes(); }], + ["%I %p", function(d) { return d.getHours(); }], + ["%a %d", function(d) { return d.getDay() && d.getDate() != 1; }], + ["%b %d", function(d) { return d.getDate() != 1; }], + ["%B", function(d) { return d.getMonth(); }], + ["%Y", function() { return true; }] + ]); + */ + + var customTimeFormat = d3.time.format("%Y-%b-%d"); + + // x-axis definition + var xAxis = d3.svg.axis() + .scale(xRange) + .tickSize(0) + .orient("bottom") + .ticks(9) + .tickFormat(customTimeFormat); + + // y-axis definition + var yAxis = d3.svg.axis() + .scale(yRange) + //.tickSize(-1 * (GRAPH_WIDTH)) // Adds horizontal lines in the graph + .ticks(5) // Set 5 levels (5 horizontal lines) + .tickFormat(d3.format("s")) + .orient("left"); + + /////////// ADD CONTENTS TO THE GRAPH CONTAINER ///////////// + + // Add the x-axis + graphContainer.append("svg:g") + .attr("class", "x axis") + .attr("transform", "translate(0," + (HEIGHT - MARGINS.bottom) + ")") + .call(xAxis) + .selectAll("text") + .style("text-anchor","end") + .attr("dx", "-.8em") + .attr("dy", ".15em") + .attr("transform","rotate(-65)"); + + // Add the y-axis + graphContainer.append("svg:g") + .attr("class", "y axis") + .attr("transform", "translate(" + (MARGINS.left) + ", 0)") + .call(yAxis) + .selectAll("text") + .attr("fill", "rgb(0, 119, 181)"); + + // Add label for the y axis + graphContainer.append("svg:text") + .style("font-size", "16px") + .style("fill", "#606060") + .attr("transform", "translate(" + (MARGINS.left/10) + ", " + MARGINS.top/2 + ")") + .text("Performance Score (Lower the better)"); + + // Add the graph function + graphContainer.append("svg:path") + .attr("d", lineFunc(graphData)) + .attr("stroke", "#0077b5") + .attr("stroke-width", 1.5) + .attr("fill", "none"); + + // Add the small bubble dots on the graph line + graphContainer.append("svg:g") + .selectAll("scatter-dots") + .data(graphData) + .enter().append("svg:circle") + .style({stroke: 'white', fill: '#0077b5'}) + .attr("cx", function (d) { return xRange(d.flowtime); } ) + .attr("cy", function (d) { return yRange(d.score); } ) + .attr("r", 4); + + + /////////// THE TOOLTIPS FOR THE GRAPH ///////////// + + // Add a transparent rectangle on top of the graph area to compute x-value mouse over + graphContainer.append("svg:rect") + .attr("class", "overlay") + .attr("width", GRAPH_WIDTH) + .attr("height", GRAPH_HEIGHT) + .attr("transform", "translate(" + (MARGINS.left) + ", " + (MARGINS.top) + ")") + .attr("opacity", 0) + .on("mouseover", function() { tooltip.style("display", null); }) // Reset tooltip display (default value) + .on("mousemove", mousemove); // Compute position and show the tooltip + + // 
The tooltip container (Top of the stack) + var tooltip = graphContainer.append("svg:g"); + + // Add the highlight bubble + var highlightCircleRad = 7; + tooltip.append("svg:circle") + .attr("stroke", "white") + .attr("fill", "#0077b5") + .attr("r", highlightCircleRad) + .style("display", "none"); + + // Add the tooltip + var tooltipWidth = 260; + tooltip.append("foreignObject") + .attr("width", tooltipWidth + "px") + .append("xhtml:body") + .attr("id", "graph_tooltip") + .style("font-size", "12px") + .attr("class","graphColor") + .style("text-align", "center") + .style("border-radius", "5px") + .style("padding", "5px") + .style("border", "1.5px solid black"); + + var bisectExec = d3.bisector(function(d) { return d.flowtime; }).left; + + function mousemove(d) { + + // Compute tooltip to be shown depending on mouse position + var record; + if (graphData.length == 1) { + record = graphData[0]; + } else { + var xValueMouse = xRange.invert(MARGINS.left + d3.mouse(this)[0]), + index = bisectExec(graphData, xValueMouse, 1), + dleft = graphData[index - 1], + dright = graphData[index]; + record = xValueMouse - dleft.flowtime > dright.flowtime - xValueMouse ? dright : dleft; + } + + // Add content to tooltip + var graphTooltip = document.getElementById("graph_tooltip"); + graphTooltip.innerHTML = ''; + graphTooltip.appendChild(getGraphTooltipContent(record, jobDefList)); + + // Set position of highlighted circle + tooltip.select("circle") + .style("display", "inline") + .attr("transform", "translate(" + xRange(record.flowtime) + "," + yRange(record.score) +")"); + + // Set position of tooltip. + var x = xRange(record.flowtime) - (tooltipWidth) - 10; + var y = yRange(record.score) - tooltip.select("body").style("height").replace("px", "")/2; + + // Don't let the tooltip cross the left margin + if (x < MARGINS.left) { + x = xRange(record.flowtime) + 10; + } + + // Don't let the tooltip cross the bottom margin + if ((yRange(record.score) + tooltip.select("body").style("height").replace("px", "")/2) >= yRange(0)) { + y = yRange(record.score) - tooltip.select("body").style("height").replace("px", "") - 10; + } + + tooltip.select("foreignObject") + .attr("height", tooltip.select("body").style("height")); + tooltip.select("foreignObject") + .transition() + .duration(75) + .attr("transform", "translate(" + x + "," + y + ")"); + } +} + +/* Return the query parameters */ +function queryString() { + + var query_string = {}; + var query = window.location.search.substring(1); // Returns the query parameters excluding ? 
+ var vars = query.split("&"); + + for (var i = 0; i < vars.length; i++) { + var pair = vars[i].split("="); + if (typeof query_string[pair[0]] === "undefined") { + query_string[pair[0]] = pair[1]; + } + } + return query_string; +} + +/* Update tooltip position on mouse-move over table */ +function loadTableTooltips() { + + var tooltipDiv = document.querySelectorAll('.hasTooltip div'); + window.onmousemove = function (e) { + var x = e.clientX, + y = e.clientY; + + for (var i = 0; i < tooltipDiv.length; i++) { + tooltipDiv[i].style.top = (y - tooltipDiv[i].offsetHeight - 10)+ "px"; + tooltipDiv[i].style.left = (x + 10) + "px"; + } + }; +} + +/* Update execution table with time in user timezone */ +function updateExecTimezone(data) { + var parse = d3.time.format("%b %d, %Y %I:%M %p"); + var time = document.querySelectorAll('.exectime'); + for (var i = time.length - 1; i >= 0; i--) { + time[i].innerHTML = parse(new Date(data[time.length - 1 - i].flowtime)); + } +} \ No newline at end of file diff --git a/public/js/jobhistoryform.js b/public/js/jobhistoryform.js new file mode 100644 index 000000000..4d0eba488 --- /dev/null +++ b/public/js/jobhistoryform.js @@ -0,0 +1,113 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +$(document).ready(function(){ + + /* Plot graph for data obtained from ajax call */ + $.getJSON('/rest/jobgraphdata?id=' + queryString()['job-def-id'], function(data) { + updateExecTimezone(data); + plotter(data, []); + }); + + loadTableTooltips(); +}); + +/** + * Example tooltip content: + * + * Sat Oct 17 2015 01:47:59 GMT+0530 (IST) + * Job score = 163672 + * Top poor stages + * Stage 1 65% + * Stage 26 25% + * Stage 12 11% + */ +function getGraphTooltipContent(record, jobDefList) { + + var content = document.createElement("div"); + content.style.textAlign = "center"; + + var heading = document.createElement("b"); + heading.appendChild(document.createTextNode(record.flowtime)); + heading.appendChild(document.createElement("br")); + + var details = document.createElement("p"); + details.appendChild(document.createTextNode("Job Score = " + record.score)); + + var jobTable = document.createElement("table"); + if (record.score != 0) { + var jobLimit = 3; + details.appendChild(document.createElement("br")); + + var tableHeader = document.createElement("th"); + tableHeader.setAttribute("colspan", "2"); + tableHeader.style.padding = "3px"; + tableHeader.style.textAlign = "center"; + tableHeader.appendChild(document.createTextNode("Score Distribution")); + jobTable.appendChild(tableHeader); + + var scoreList = []; + for (var i = 0; i < record.stagescores.length; i++) { + var scoreWidth = record.stagescores[i]["stagescore"] * 100 / record.score; + scoreList.push([scoreWidth, i]); + } + + scoreList.sort(function (left, right) { + return left[0] > right[0] ? 
-1 : 1; + }); + + for (var stageIndex = 0; stageIndex < scoreList.length; stageIndex++) { + + var width = scoreList[stageIndex][0]; + var index = scoreList[stageIndex][1]; + + // Skip after jobLimit jobs are captured or when width becomes 0. + if (stageIndex >= jobLimit || width == 0) { + break; + } + + var tableCell1 = document.createElement("td"); + tableCell1.style.padding = "3px"; + tableCell1.style.border = "none"; + tableCell1.setAttribute("width", "65px"); + tableCell1.appendChild(document.createTextNode("Stage " + (index + 1))); + + var stageScoreRect = document.createElement("div"); + stageScoreRect.style.padding = "3px"; + stageScoreRect.style.background = "red"; + stageScoreRect.style.width = width + "%"; + stageScoreRect.appendChild(document.createTextNode(+width.toFixed(2) + "%")); + + var tableCell2 = document.createElement("td"); + tableCell2.style.border = "none"; + tableCell2.appendChild(stageScoreRect); + + var tableRow = document.createElement("tr"); + tableRow.appendChild(tableCell1); + tableRow.appendChild(tableCell2); + + jobTable.appendChild(tableRow); + } + + jobTable.setAttribute("border", "2px solid black"); + jobTable.style.width = "100%"; + } + + content.appendChild(heading); + content.appendChild(details); + content.appendChild(jobTable); + return content; +} \ No newline at end of file diff --git a/public/js/jobresourcesmetricshistoryform.js b/public/js/jobresourcesmetricshistoryform.js new file mode 100644 index 000000000..0264d9282 --- /dev/null +++ b/public/js/jobresourcesmetricshistoryform.js @@ -0,0 +1,27 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +$(document).ready(function(){ + + /* Plot graph for data obtained from ajax call */ + $.getJSON('/rest/jobmetricsgraphdata?id=' + queryString()['job-def-id'], function(data) { + updateExecTimezone(data); + plotter(data, []); + }); + + loadTableTooltips(); +}); + diff --git a/public/js/jobtimehistoryform.js b/public/js/jobtimehistoryform.js new file mode 100644 index 000000000..0264d9282 --- /dev/null +++ b/public/js/jobtimehistoryform.js @@ -0,0 +1,27 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +$(document).ready(function(){ + + /* Plot graph for data obtained from ajax call */ + $.getJSON('/rest/jobmetricsgraphdata?id=' + queryString()['job-def-id'], function(data) { + updateExecTimezone(data); + plotter(data, []); + }); + + loadTableTooltips(); +}); + diff --git a/public/js/resourcegraphtooltiputility.js b/public/js/resourcegraphtooltiputility.js new file mode 100644 index 000000000..26a19a7c7 --- /dev/null +++ b/public/js/resourcegraphtooltiputility.js @@ -0,0 +1,97 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +function getGraphTooltipContent(record, jobDefList) { + + var content = document.createElement("div"); + content.style.textAlign = "center"; + + var heading = document.createElement("b"); + heading.appendChild(document.createTextNode(record.flowtime)); + heading.appendChild(document.createElement("br")); + + var resourcesTable = document.createElement("table"); + if (record.resourceused != 0) { + var jobLimit = 3; + + var tableHeader = document.createElement("th"); + tableHeader.setAttribute("colspan", "2"); + tableHeader.style.padding = "3px"; + tableHeader.style.textAlign = "center"; + tableHeader.style.width = "100%"; + tableHeader.appendChild(document.createTextNode("Resources")); + resourcesTable.appendChild(tableHeader); + + // add total used resources + var tableCell1 = document.createElement("td"); + tableCell1.style.padding = "3px"; + tableCell1.style.border = "none"; + tableCell1.setAttribute("width", "90px"); + tableCell1.appendChild(document.createTextNode("Used (GB Hrs)")); + + var stageScoreRect = document.createElement("div"); + stageScoreRect.style.padding = "3px"; + stageScoreRect.style.background = "#0077b5"; + stageScoreRect.style.width = "100%"; + stageScoreRect.appendChild(document.createTextNode(parseFloat(Math.round((record.resourceused)/(1024*3600)*100)/100).toFixed(2))); + console.log(record.resourceused); + + var tableCell2 = document.createElement("td"); + tableCell2.style.border = "none"; + tableCell2.appendChild(stageScoreRect); + + var tableRow = document.createElement("tr"); + tableRow.appendChild(tableCell1); + tableRow.appendChild(tableCell2); + + resourcesTable.appendChild(tableRow); + + // add total resourcewasted + var tableCell3 = document.createElement("td"); + tableCell3.style.padding = "3px"; + tableCell3.style.border = "none"; + tableCell3.setAttribute("width", "65px"); + tableCell3.appendChild(document.createTextNode("Wasted (GB Hrs)")); + + var resourcewastedpercent = (record.resourcewasted/ record.resourceused) * 100; + + var stageScoreRect2 = document.createElement("div"); + stageScoreRect2.style.padding = "3px"; + stageScoreRect2.style.background = "red"; + stageScoreRect2.style.width = (Math.floor(resourcewastedpercent+1)) + "%"; + stageScoreRect2.appendChild(document.createTextNode(parseFloat(Math.round(record.resourcewasted/(1024*3600) * 100)/100).toFixed(2) + "(" + Math.floor(resourcewastedpercent) + "%)")); + + console.log(record.resourcewasted + "(" + 
resourcewastedpercent + "%)"); + + var tableCell4 = document.createElement("td"); + tableCell4.style.border = "none"; + tableCell4.appendChild(stageScoreRect2); + + var tableRow2 = document.createElement("tr"); + tableRow2.appendChild(tableCell3); + tableRow2.appendChild(tableCell4); + + resourcesTable.appendChild(tableRow2); + + resourcesTable.setAttribute("border", "2px solid black"); + resourcesTable.style.width = "100%"; + } + + content.appendChild(heading); + content.appendChild(resourcesTable); + content.style.padding = "0"; + return content; +} \ No newline at end of file diff --git a/public/js/script.js b/public/js/script.js deleted file mode 100644 index 4594c7855..000000000 --- a/public/js/script.js +++ /dev/null @@ -1,80 +0,0 @@ -$(document).ready(function(){ - - var form = $("#search-form"); - - - var jobid = $("#form-job-id"); - var user = $("#form-user"); - var jobtypeEnable = $("#form-jobtype-enable"); - var jobtype = $("#form-jobtype"); - var severityEnable = $("#form-severity-enable"); - var severity = $("#form-severity"); - var analysis = $("#form-analysis"); - var datetimeEnable = $("#form-datetime-enable"); - var startDate = $("#form-start-date"); - var endDate = $("#form-end-date"); - - startDate.datepicker(); - endDate.datepicker(); - - var updateForm = function(){ - if(jobid.val()) { - user.prop('disabled', true); - severity.prop('disabled', true); - analysis.prop('disabled', true); - jobtypeEnable.prop('disabled', true); - severityEnable.prop('disabled', true); - datetimeEnable.prop('disabled', true); - startDate.prop('disabled', true); - endDate.prop('disabled', true); - } - else{ - jobtypeEnable.prop('disabled', false); - severityEnable.prop('disabled', false); - datetimeEnable.prop('disabled', false); - user.prop('disabled', false); - if(jobtypeEnable.prop('checked')){ - jobtype.prop('disabled', false); - } - else { - jobtype.prop('disabled', true); - } - if(severityEnable.prop('checked')){ - severity.prop('disabled', false); - analysis.prop('disabled', false); - } - else { - severity.prop('disabled', true); - analysis.prop('disabled', true); - } - if(datetimeEnable.prop('checked')){ - startDate.prop('disabled', false); - endDate.prop('disabled', false); - } - else { - startDate.prop('disabled', true); - endDate.prop('disabled', true); - } - } - } - - jobid.on("propertychange keyup input paste", updateForm); - jobtypeEnable.change(updateForm); - severityEnable.change(updateForm); - datetimeEnable.change(updateForm); - - form.submit(function(event){ - var data = form.serialize(); - localStorage.setItem('search-form', data); - //Remove useless fields from the URL - form.find('input[name]').filter(function(){return !$(this).val();}).attr('name', ''); - }); - - try { - var data = localStorage.getItem('search-form'); - form.deserialize(data); - } - catch(e){} - - updateForm(); -}); \ No newline at end of file diff --git a/public/js/searchform.js b/public/js/searchform.js new file mode 100644 index 000000000..0e5719b77 --- /dev/null +++ b/public/js/searchform.js @@ -0,0 +1,143 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +$(document).ready(function(){ + + var form = $("#search-form"); + var formSubmit = $("#submit-button"); + + var jobId = $("#form-job-id"); + var flowExecId = $("#form-flow-exec-id"); + var user = $("#form-username"); + var queueName = $("#form-queue-name"); + var jobtypeEnable = $("#form-job-type-enable"); + var jobtype = $("#form-job-type"); + var severityEnable = $("#form-severity-enable"); + var severity = $("#form-severity"); + var analysis = $("#form-analysis"); + var datetimeEnable = $("#form-datetime-enable"); + var finishTimeBeginDate = $("#form-finished-time-begin-date"); + var finishTimeEndDate = $("#form-finished-time-end-date"); + var finishTimeBeginTimestamp = $("#form-finished-time-begin"); + var finishTimeEndTimestamp = $("#form-finished-time-end"); + + finishTimeBeginDate.datepicker({ + autoclose: true, + todayHighlight: true, + }); + finishTimeEndDate.datepicker({ + autoclose: true, + todayHighlight: true, + }); + + var updateForm = function(){ + if(jobId.val()) { + flowExecId.prop('disabled', true); + user.prop('disabled', true); + queueName.prop('disabled', true); + severity.prop('disabled', true); + analysis.prop('disabled', true); + jobtype.prop('disabled', true); + jobtypeEnable.prop('disabled', true); + severityEnable.prop('disabled', true); + datetimeEnable.prop('disabled', true); + finishTimeBeginDate.prop('disabled', true); + finishTimeEndDate.prop('disabled', true); + } else if(flowExecId.val()) { + jobId.prop('disabled', true); + user.prop('disabled', true); + queueName.prop('disabled', true); + severity.prop('disabled', true); + analysis.prop('disabled', true); + jobtype.prop('disabled', true); + jobtypeEnable.prop('disabled', true); + severityEnable.prop('disabled', true); + datetimeEnable.prop('disabled', true); + finishTimeBeginDate.prop('disabled', true); + finishTimeEndDate.prop('disabled', true); + } + else{ + jobId.prop('disabled', false); + flowExecId.prop('disabled', false); + jobtypeEnable.prop('disabled', false); + severityEnable.prop('disabled', false); + datetimeEnable.prop('disabled', false); + user.prop('disabled', false); + queueName.prop('disabled', false); + if(jobtypeEnable.prop('checked')){ + jobtype.prop('disabled', false); + } + else { + jobtype.prop('disabled', true); + } + if(severityEnable.prop('checked')){ + severity.prop('disabled', false); + analysis.prop('disabled', false); + } + else { + severity.prop('disabled', true); + analysis.prop('disabled', true); + } + if(datetimeEnable.prop('checked')){ + finishTimeBeginDate.prop('disabled', false); + finishTimeEndDate.prop('disabled', false); + } + else { + finishTimeBeginDate.prop('disabled', true); + finishTimeEndDate.prop('disabled', true); + } + } + } + jobId.on("propertychange keyup input paste", updateForm); + flowExecId.on("propertychange keyup input paste", updateForm); + jobtypeEnable.change(updateForm); + severityEnable.change(updateForm); + datetimeEnable.change(updateForm); + + formSubmit.click(function() { + + var formParams = form.serialize(); + + // Convert the dates from user time-zone to epoch timestamp + if(datetimeEnable.prop('checked')) { + var dateBegin = finishTimeBeginDate.val(); + if (dateBegin !== '') { + finishTimeBeginTimestamp.val(new Date(dateBegin).getTime()); + finishTimeBeginDate.val(''); // Remove this parameter from appearing in url + } + var dateEnd = finishTimeEndDate.val(); + if (dateEnd !== '') { + finishTimeEndTimestamp.val(new 
Date(dateEnd).getTime()); + finishTimeEndDate.val(''); // Remove this parameter from appearing in url + } + } + + // Cache the search parameters + localStorage.setItem('search-form', formParams); + //Remove useless fields from the URL + form.find('input[name]').filter(function(){return !$(this).val();}).attr('name', ''); + + form.submit(); + }); + + try { + var data = localStorage.getItem('search-form'); + form.deserialize(data); + } + catch(e){} + + updateForm(); +}); \ No newline at end of file diff --git a/public/js/searchpanel.js b/public/js/searchpanel.js new file mode 100644 index 000000000..505d32081 --- /dev/null +++ b/public/js/searchpanel.js @@ -0,0 +1,21 @@ + +$('.search_opt').on("click",function(){ + $('#search_concept').text($(this).text()) + console.log($(this).text()); +}); + +$('#search_button').on("click", function() { + let text = $('#primary_search').val(); + let type = $('#search_concept').text(); + let encodedurl = encodeURIComponent(text); + if(type=='Workflow') { + let transition = "/new#/workflow?workflowid=" + encodedurl; + window.location = transition + } else if (type=='Job') { + let transition = "/new#/job?jobid=" + encodedurl; + window.location = transition; + } else if (type=='Application') { + let transition = "/new#/app?applicationid=" + encodedurl; + window.location = transition; + } +}); \ No newline at end of file diff --git a/public/js/timegraphtooltiputility.js b/public/js/timegraphtooltiputility.js new file mode 100644 index 000000000..298f185b0 --- /dev/null +++ b/public/js/timegraphtooltiputility.js @@ -0,0 +1,121 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
diff --git a/public/js/searchpanel.js b/public/js/searchpanel.js
new file mode 100644
index 000000000..505d32081
--- /dev/null
+++ b/public/js/searchpanel.js
@@ -0,0 +1,21 @@
+
+$('.search_opt').on("click", function() {
+  $('#search_concept').text($(this).text());
+});
+
+$('#search_button').on("click", function() {
+  let text = $('#primary_search').val();
+  let type = $('#search_concept').text();
+  let encodedurl = encodeURIComponent(text);
+  if (type === 'Workflow') {
+    window.location = "/new#/workflow?workflowid=" + encodedurl;
+  } else if (type === 'Job') {
+    window.location = "/new#/job?jobid=" + encodedurl;
+  } else if (type === 'Application') {
+    window.location = "/new#/app?applicationid=" + encodedurl;
+  }
+});
\ No newline at end of file
diff --git a/public/js/timegraphtooltiputility.js b/public/js/timegraphtooltiputility.js
new file mode 100644
index 000000000..298f185b0
--- /dev/null
+++ b/public/js/timegraphtooltiputility.js
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+function getGraphTooltipContent(record, jobDefList) {
+
+  var content = document.createElement("div");
+  content.style.textAlign = "center";
+
+  var heading = document.createElement("b");
+  heading.appendChild(document.createTextNode(record.flowtime));
+  heading.appendChild(document.createElement("br"));
+
+  var runtimeTable = document.createElement("table");
+  if (record.runtime !== 0) {
+
+    // Header cells belong inside a row; wrap the th in a tr before appending.
+    var tableHeader = document.createElement("th");
+    tableHeader.setAttribute("colspan", "2");
+    tableHeader.style.padding = "3px";
+    tableHeader.style.textAlign = "center";
+    tableHeader.style.width = "100%";
+    tableHeader.appendChild(document.createTextNode("Time"));
+    var headerRow = document.createElement("tr");
+    headerRow.appendChild(tableHeader);
+    runtimeTable.appendChild(headerRow);
+
+    // add total runtime
+    var tableCell1 = document.createElement("td");
+    tableCell1.style.padding = "3px";
+    tableCell1.style.border = "none";
+    tableCell1.setAttribute("width", "90px");
+    tableCell1.appendChild(document.createTextNode("Run Time (hh:mm:ss)"));
+
+    var stageScoreRect = document.createElement("div");
+    stageScoreRect.style.padding = "3px";
+    stageScoreRect.style.background = "#0077b5";
+    stageScoreRect.style.width = "100%";
+    stageScoreRect.appendChild(document.createTextNode(msToHMS(record.runtime)));
+
+    var tableCell2 = document.createElement("td");
+    tableCell2.style.border = "none";
+    tableCell2.appendChild(stageScoreRect);
+
+    var tableRow = document.createElement("tr");
+    tableRow.appendChild(tableCell1);
+    tableRow.appendChild(tableCell2);
+
+    runtimeTable.appendChild(tableRow);
+
+    // add total waittime; the bar width is proportional to the wait fraction
+    var tableCell3 = document.createElement("td");
+    tableCell3.style.padding = "3px";
+    tableCell3.style.border = "none";
+    tableCell3.setAttribute("width", "65px");
+    tableCell3.appendChild(document.createTextNode("Wait Time (hh:mm:ss)"));
+
+    var waittimepercent = (record.waittime / record.runtime) * 100;
+
+    var stageScoreRect2 = document.createElement("div");
+    stageScoreRect2.style.padding = "3px";
+    stageScoreRect2.style.background = "red";
+    stageScoreRect2.style.width = (Math.floor(waittimepercent + 1)) + "%";
+    stageScoreRect2.appendChild(document.createTextNode(msToHMS(record.waittime) + " (" + Math.floor(waittimepercent) + "%)"));
+
+    var tableCell4 = document.createElement("td");
+    tableCell4.style.border = "none";
+    tableCell4.appendChild(stageScoreRect2);
+
+    var tableRow2 = document.createElement("tr");
+    tableRow2.appendChild(tableCell3);
+    tableRow2.appendChild(tableCell4);
+
+    runtimeTable.appendChild(tableRow2);
+
+    runtimeTable.setAttribute("border", "2px solid black");
+    runtimeTable.style.width = "100%";
+  }
+
+  content.appendChild(heading);
+  content.appendChild(runtimeTable);
+  content.style.padding = "0";
+  return content;
+}
+
+function msToHMS( ms ) {
+  // Convert to seconds:
+  var seconds = ms / 1000;
+  // Extract hours:
+  var hours = parseInt( seconds / 3600 );
+  seconds = seconds % 3600;
+  // Extract minutes:
+  var minutes = parseInt( seconds / 60 );
+  // Keep only seconds not extracted to minutes:
+  seconds = parseInt(seconds % 60);
+
+  return hours + ":" + minutes + ":" + seconds;
+}
\ No newline at end of file
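// The tooltip labels promise "hh:mm:ss", but msToHMS() above returns unpadded
// fields (e.g. "1:5:3" rather than "01:05:03"). A zero-padded variant (sketch):
//
//   function msToHMSPadded(ms) {
//     var pad = function(n) { return (n < 10 ? "0" : "") + n; };
//     var seconds = Math.floor(ms / 1000);
//     var hours = Math.floor(seconds / 3600);
//     var minutes = Math.floor((seconds % 3600) / 60);
//     return pad(hours) + ":" + pad(minutes) + ":" + pad(seconds % 60);
//   }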
diff --git a/resolver.conf.template b/resolver.conf.template
new file mode 100644
index 000000000..98ea74f36
--- /dev/null
+++ b/resolver.conf.template
@@ -0,0 +1,4 @@
+[repositories]
+  local
+  # label ":" url [ ["," ivyPattern] "," artifactPattern [", mavenCompatible"]]
+  # custom_resolver : repo_url, [organization]/[module]/[revision]/[module]-[revision].ivy, [organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext], mavenCompatible
diff --git a/scripts/start.sh b/scripts/start.sh
new file mode 100755
index 000000000..0a9e8b089
--- /dev/null
+++ b/scripts/start.sh
@@ -0,0 +1,185 @@
+#!/usr/bin/env bash
+
+#
+# Copyright 2016 LinkedIn Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+function print_usage() {
+  echo "usage: ./start.sh PATH_TO_APP_CONFIG_DIR (optional if the env variable ELEPHANT_CONF_DIR is already set)"
+}
+
+function check_config() {
+  # ${!1} is bash indirect expansion: it reads the variable whose name is in $1
+  if [ -z "${!1}" ]; then
+    echo "error: ${1} must be present in the config file."
+    check=0
+  else
+    echo "${1}: " ${!1}
+  fi
+}
+
+# Save project root dir
+script_path=`which $0`
+script_dir=`dirname $script_path`
+project_root=$script_dir/../
+
+# The user can either set the environment variable ELEPHANT_CONF_DIR or pass the config dir as an optional argument
+if [ -z "$1" ]; then
+  if [ -z "$ELEPHANT_CONF_DIR" ]; then
+    if [ -d "${project_root}/app-conf" ]; then
+      ELEPHANT_CONF_DIR=$project_root/app-conf
+    else
+      echo "error: Couldn't find the configuration directory."
+      echo "Please set the env variable ELEPHANT_CONF_DIR to the configuration directory or pass its location as an argument."
+      print_usage
+      exit 1
+    fi
+  fi
+  CONF_DIR="$ELEPHANT_CONF_DIR"
+else
+  CONF_DIR=$1
+fi
+
+# Verify and get absolute path to conf
+if [ -d "$CONF_DIR" ]; then
+  CONF_DIR=`cd "$CONF_DIR";pwd`
+  echo "Using config dir: $CONF_DIR"
+else
+  echo "error: ${1} is not a directory or it does not exist. Please specify the application's configuration directory (app-conf)."
+  print_usage
+  exit 1
+fi
+
+# Set/update the env variable so the Dr. Elephant run script uses this dir and loads all confs into the classpath
+export ELEPHANT_CONF_DIR=$CONF_DIR
+
+CONFIG_FILE=$ELEPHANT_CONF_DIR"/elephant.conf"
+echo "Using config file: "$CONFIG_FILE
+
+# The config dir must contain a valid elephant.conf file
+if [ -f "$CONFIG_FILE" ];
+then
+  echo "Reading from config file..."
+else
+  echo "error: Couldn't find a valid config file at: " $CONFIG_FILE
+  print_usage
+  exit 1
+fi
+
+source "$CONFIG_FILE"
+
+# db_url, db_name and db_user must be present in the config file
+check=1
+check_config db_url
+check_config db_name
+check_config db_user
+
+if [ $check = 0 ];
+then
+  echo "error: Failed to read the required configs for Dr. Elephant. Please check the config file."
+  exit 1
+fi
+
+db_loc="jdbc:mysql://"$db_url"/"$db_name"?characterEncoding=UTF-8"
+
+# db_password is optional. The default is ""
+db_password="${db_password:-""}"
+
+# port is optional. The default is 8080
+port="${port:-8080}"
+echo "http port: " $port
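# For reference, a minimal elephant.conf satisfying the checks above might look
# like this (illustrative values only):
#
#   db_url=localhost
#   db_name=drelephant
#   db_user=root
#   db_password=""
#   port=8080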
+# Check for keytab_user, keytab_location and application_secret in the elephant.conf
+if [ -n "${keytab_user}" ]; then
+  echo "keytab_user: " $keytab_user
+  OPTS+=" -Dkeytab.user=$keytab_user"
+fi
+
+if [ -n "${keytab_location}" ]; then
+  echo "keytab_location: " $keytab_location
+  OPTS+=" -Dkeytab.location=$keytab_location"
+fi
+
+if [ -n "${application_secret}" ]; then
+  OPTS+=" -Dapplication.secret=$application_secret"
+fi
+
+# Enable web analytics if configured
+if [ -n "${enable_analytics}" ]; then
+  OPTS+=" -Denable.analytics=$enable_analytics"
+fi
+
+# Enable Dropwizard metrics if configured
+if [ -n "${metrics}" ]; then
+  OPTS+=" -Dmetrics=$metrics"
+fi
+
+# Enable the metrics agent jar if configured. The agent publishes metrics to other apps.
+if [ -n "${metrics_agent_jar}" ]; then
+  OPTS+=" -J$metrics_agent_jar"
+fi
+
+
+# Navigate to project root
+cd $project_root
+
+# Check if Dr. Elephant is already started
+if [ -f RUNNING_PID ];
+then
+  echo "error: Dr. Elephant is already started!"
+  exit 1
+fi
+
+# Dr. Elephant executable not found
+if [ ! -f bin/dr-elephant ];
+then
+  echo "error: Couldn't find the Dr. Elephant executable (bin/dr-elephant)."
+  exit 1
+fi
+
+# Get the hadoop version by executing 'hadoop version' and parsing the first line
+HADOOP_VERSION=$(hadoop version | awk '{if (NR == 1) {print $2;}}')
+if [[ $HADOOP_VERSION == 1* ]];
+then
+  echo "This is a hadoop1.x grid. Switch to hadoop2 if you want to use Dr. Elephant."
+  exit 1
+elif [[ $HADOOP_VERSION == 2* ]];
+then
+  JAVA_LIB_PATH=$HADOOP_HOME"/lib/native"
+  echo "This is a hadoop2.x grid. Adding Java library path: "$JAVA_LIB_PATH
+else
+  echo "error: Hadoop isn't properly set up on this machine. Please verify that the command 'hadoop version' works."
+  exit 1
+fi
+
+OPTS+=" $jvm_args -Djava.library.path=$JAVA_LIB_PATH"
+OPTS+=" -Dhttp.port=$port"
+OPTS+=" -Ddb.default.url=$db_loc -Ddb.default.user=$db_user -Ddb.default.password=$db_password"
+
+# Set Java options; add memory settings (e.g. -Xms1024m -Xmx1024m) here if needed
+export JAVA_OPTS="-XX:+HeapDumpOnOutOfMemoryError"
+
+# Start Dr. Elephant
+echo "Starting Dr. Elephant ...."
+nohup ./bin/dr-elephant ${OPTS} > $project_root/dr.log 2>&1 &
+
+sleep 2
+
+# If Dr. Elephant starts successfully, Play creates a file 'RUNNING_PID' under the project root
+if [ -f RUNNING_PID ];
+then
+  echo "Dr. Elephant started."
+else
+  echo "error: Failed to start Dr. Elephant. Please check that this is a valid Dr. Elephant executable, or check the logs under the 'logs' directory."
+  exit 1
+fi
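# Typical usage (illustrative): either export the config dir once,
#
#   export ELEPHANT_CONF_DIR=/path/to/app-conf
#   ./scripts/start.sh
#
# or pass it explicitly as the first argument:
#
#   ./scripts/start.sh /path/to/app-conf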
diff --git a/scripts/stop.sh b/scripts/stop.sh
new file mode 100755
index 000000000..d085fc22d
--- /dev/null
+++ b/scripts/stop.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+
+#
+# Copyright 2016 LinkedIn Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+# Navigate to project root dir
+script_dir=`which $0`
+script_dir=`dirname $script_dir`
+project_root=$script_dir/../
+cd $project_root
+
+# If the file RUNNING_PID exists, Dr. Elephant is running
+if [ -f RUNNING_PID ];
+then
+  echo "Dr. Elephant is running."
+else
+  echo "Dr. Elephant is not running."
+  exit 1
+fi
+
+# RUNNING_PID contains the PID of our Dr. Elephant instance
+proc=`cat RUNNING_PID`
+
+echo "Killing Dr. Elephant...."
+kill $proc
+
+# Give the process a moment to shut down
+sleep 1
+
+# Play removes RUNNING_PID when the running process dies
+if [ ! -f RUNNING_PID ];
+then
+  echo "Dr. Elephant has been stopped."
+else
+  echo "error: Failed to stop Dr. Elephant."
+  exit 1
+fi
diff --git a/test/com/linkedin/drelephant/analysis/AnalyticJobTest.java b/test/com/linkedin/drelephant/analysis/AnalyticJobTest.java
new file mode 100644
index 000000000..6d71e2d26
--- /dev/null
+++ b/test/com/linkedin/drelephant/analysis/AnalyticJobTest.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.analysis;
+
+import com.linkedin.drelephant.ElephantContext;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic;
+import common.TestUtil;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import mockit.Expectations;
+import mockit.Mocked;
+import models.AppResult;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import static common.TestConstants.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+
+/**
+ * Tests the {@code getAnalysis()} method in {@code AnalyticJob}.
+ * Dependencies on {@code ElephantContext}, {@code ElephantFetcher} and the heuristics are mocked
+ * out with JMockit.
+ */
+@Ignore
+public class AnalyticJobTest {
+  @Mocked(stubOutClassInitialization = true)
+  ElephantContext elephantContext = null;
+  @Mocked
+  MapReduceFetcherHadoop2 fetcher;
+
+  @Test
+  public void testGetAnalysis()
+      throws Exception {
+    try {
+      // Setup analytic job (the start time must precede the finish time)
+      final AnalyticJob analyticJob = new AnalyticJob().
+          setAppId(TEST_JOB_ID1).setAppType(new ApplicationType(TEST_APP_TYPE)).
+          setStartTime(1462178403).setFinishTime(1462178412).setName(TEST_JOB_NAME).
+          setQueueName(TEST_DEFAULT_QUEUE_NAME).setUser(TEST_USERNAME).setTrackingUrl(TEST_TRACKING_URL);
+
+      // Setup job counter data
+      String filePath = FILENAME_JOBCOUNTER;
+      MapReduceCounterData jobCounter = new MapReduceCounterData();
+      setCounterData(jobCounter, filePath);
+
+      // Setup mapper data
+      long[][] mapperTasksTime = {{2563, 0, 0, 0, 0}, {2562, 0, 0, 0, 0}, {2567, 0, 0, 0, 0}};
+      MapReduceTaskData[] mappers = new MapReduceTaskData[3];
+      for (int i = 1; i <= mappers.length; i++) {
+        MapReduceCounterData taskCounter = new MapReduceCounterData();
+        setCounterData(taskCounter, FILENAME_MAPPERTASK.replaceFirst("\\$", Integer.toString(i)));
+        mappers[i - 1] = new MapReduceTaskData("task-id-" + (i - 1), "task-attempt-id-" + (i - 1));
+        mappers[i - 1].setTimeAndCounter(mapperTasksTime[i - 1], taskCounter);
+      }
+
+      // Setup reducer data
+      long[][] reducerTasksTime = {{1870, 1665, 14, 0, 0}};
+      MapReduceTaskData[] reducers = new MapReduceTaskData[1];
+      for (int i = 1; i <= reducers.length; i++) {
+        MapReduceCounterData taskCounter = new MapReduceCounterData();
+        setCounterData(taskCounter, FILENAME_REDUCERTASK.replaceFirst("\\$", Integer.toString(i)));
+        reducers[i - 1] = new MapReduceTaskData("task-id-" + (i - 1), "task-attempt-id-" + (i - 1));
+        reducers[i - 1].setTimeAndCounter(reducerTasksTime[i - 1], taskCounter);
+      }
+
+      // Setup job configuration data
+      filePath = FILENAME_JOBCONF;
+      Properties jobConf = TestUtil.loadProperties(filePath);
+
+      // Setup application data
+      final MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).
+          setMapperData(mappers).setReducerData(reducers).setJobConf(jobConf).setSucceeded(true).
+          setDiagnosticInfo("").setUsername(TEST_USERNAME).setUrl("").setJobName(TEST_JOB_NAME).
+          setStartTime(1462178403).setFinishTime(1462178412).setRetry(false).setAppId(TEST_JOB_ID1);
+
+      // Setup heuristics
+      final List<Heuristic> heuristics = loadHeuristics();
+
+      // Setup job type
+      final JobType jobType = new JobType(TEST_JOB_TYPE, TEST_JOBCONF_NAME, TEST_JOBCONF_PATTERN);
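      // The Expectations block below records canned results with JMockit: when
      // getAnalysis() invokes the mocked fetcher and ElephantContext, the data,
      // heuristics and job type prepared above are returned instead of anything
      // being fetched from a real cluster.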
+      // Set expectations in JMockit
+      new Expectations() {{
+        fetcher.fetchData(analyticJob);
+        result = data;
+
+        elephantContext.getHeuristicsForApplicationType(analyticJob.getAppType());
+        result = heuristics;
+
+        elephantContext.matchJobType(data);
+        result = jobType;
+      }};
+
+      // Call the method under test
+      AppResult result = analyticJob.getAnalysis();
+
+      // Make assertions on the result
+      assertTrue("Result is null", result != null);
+      assertTrue("Score did not match", result.score == TEST_SCORE);
+      assertTrue("Severity did not match", result.severity.toString().equals(TEST_SEVERITY));
+      assertTrue("APP ID did not match", result.id.equals(TEST_JOB_ID1));
+      assertTrue("Scheduler did not match", result.scheduler.equals(TEST_SCHEDULER));
+    } catch (Exception e) {
+      e.printStackTrace();
+      fail("Test failed with exception: " + e.getMessage());
+    }
+  }
+
+  private void setCounterData(MapReduceCounterData counter, String filePath)
+      throws IOException {
+    Properties counterData = TestUtil.loadProperties(filePath);
+
+    for (Object groupName : counterData.keySet()) {
+      String counterValueString = (String) counterData.get(groupName);
+      counterValueString = counterValueString.replaceAll("\\{|\\}", "");
+
+      StringBuilder stringBuilder = new StringBuilder();
+
+      for (String counterKeyValue : counterValueString.split(",")) {
+        stringBuilder.append(counterKeyValue.trim()).append('\n');
+      }
+      ByteArrayInputStream inputStream = new ByteArrayInputStream(stringBuilder.toString().getBytes(DEFAULT_ENCODING));
+      Properties counterProperties = new Properties();
+      counterProperties.load(inputStream);
+
+      for (Object counterKey : counterProperties.keySet()) {
+        long counterValue = Long.parseLong(counterProperties.get(counterKey).toString());
+        counter.set(groupName.toString(), counterKey.toString(), counterValue);
+      }
+    }
+  }
+
+  // MapperDataSkewHeuristic is reused for every entry below: the heuristics are
+  // mocked out in this test, so only the configuration data matters.
+  private List<Heuristic> loadHeuristics() {
+    List<Heuristic> heuristics = new ArrayList<Heuristic>();
+    // dummy params map
+    Map<String, String> paramsMap = new HashMap<String, String>();
+    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Data Skew",
+        "com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic",
+        "views.html.help.mapreduce.helpMapperDataSkew", new ApplicationType("mapreduce"), paramsMap)));
+    heuristics.add(new MapperDataSkewHeuristic(
+        new HeuristicConfigurationData("Mapper GC", "com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic",
+            "views.html.help.mapreduce.helpGC", new ApplicationType("mapreduce"), paramsMap)));
+    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Time",
+        "com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic", "views.html.help.mapreduce.helpMapperTime",
+        new ApplicationType("mapreduce"), paramsMap)));
+    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Speed",
+        "com.linkedin.drelephant.mapreduce.heuristics.MapperSpeedHeuristic",
+        "views.html.help.mapreduce.helpMapperSpeed", new ApplicationType("mapreduce"), paramsMap)));
+    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Spill",
+        "com.linkedin.drelephant.mapreduce.heuristics.MapperSpillHeuristic",
+        "views.html.help.mapreduce.helpMapperSpill", new ApplicationType("mapreduce"), paramsMap)));
+    heuristics.add(new 
MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Memory", + "com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristic", + "views.html.help.mapreduce.helpMapperMemory", new ApplicationType("mapreduce"), paramsMap))); + heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Data Skew", + "com.linkedin.drelephant.mapreduce.heuristics.ReducerDataSkewHeuristic", + "views.html.help.mapreduce.helpReducerDataSkew", new ApplicationType("mapreduce"), paramsMap))); + heuristics.add(new MapperDataSkewHeuristic( + new HeuristicConfigurationData("Reducer GC", "com.linkedin.drelephant.mapreduce.heuristics.ReducerGCHeuristic", + "views.html.help.mapreduce.helpGC", new ApplicationType("mapreduce"), paramsMap))); + heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Time", + "com.linkedin.drelephant.mapreduce.heuristics.ReducerTimeHeuristic", + "views.html.help.mapreduce.helpReducerTime", new ApplicationType("mapreduce"), paramsMap))); + heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Memory", + "com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristic", + "views.html.help.mapreduce.helpReducerMemory", new ApplicationType("mapreduce"), paramsMap))); + heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Shuffle & Sort", + "com.linkedin.drelephant.mapreduce.heuristics.ShuffleSortHeuristic", + "views.html.help.mapreduce.helpShuffleSort", new ApplicationType("mapreduce"), paramsMap))); + heuristics.add(new MapperDataSkewHeuristic( + new HeuristicConfigurationData("Exception", "com.linkedin.drelephant.mapreduce.heuristics.ExceptionHeuristic", + "views.html.help.mapreduce.helpException", new ApplicationType("mapreduce"), paramsMap))); + + return heuristics; + } +} diff --git a/test/com/linkedin/drelephant/analysis/SeverityTest.java b/test/com/linkedin/drelephant/analysis/SeverityTest.java new file mode 100644 index 000000000..7f54accfc --- /dev/null +++ b/test/com/linkedin/drelephant/analysis/SeverityTest.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.analysis; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + + +public class SeverityTest { + + @Test + public void testSeverityMax() { + assertEquals(Severity.CRITICAL, Severity.max(Severity.CRITICAL)); + assertEquals(Severity.CRITICAL, Severity.max(Severity.CRITICAL, Severity.SEVERE)); + assertEquals(Severity.CRITICAL, Severity.max(Severity.LOW, Severity.LOW, Severity.CRITICAL)); + } + + @Test + public void testSeverityMin() { + assertEquals(Severity.NONE, Severity.min(Severity.NONE, Severity.LOW)); + assertEquals(Severity.LOW, Severity.min(Severity.LOW, Severity.LOW)); + } + + @Test + public void testSeverityAscending() { + assertEquals(Severity.CRITICAL, Severity.getSeverityAscending(8, 2, 4, 6, 8)); + assertEquals(Severity.SEVERE, Severity.getSeverityAscending(10, 2, 4, 6, 12)); + } + + @Test + public void testSeverityDescending() { + assertEquals(Severity.CRITICAL, Severity.getSeverityDescending(2, 10, 8, 4, 2)); + assertEquals(Severity.MODERATE, Severity.getSeverityDescending(5, 10, 8, 4, 2)); + } +} diff --git a/test/com/linkedin/drelephant/analysis/SeverityThresholdsTest.scala b/test/com/linkedin/drelephant/analysis/SeverityThresholdsTest.scala new file mode 100644 index 000000000..488118e01 --- /dev/null +++ b/test/com/linkedin/drelephant/analysis/SeverityThresholdsTest.scala @@ -0,0 +1,73 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.analysis + +import org.scalatest.{FunSpec, Matchers} + +class SeverityThresholdsTest extends FunSpec with Matchers { + describe("SeverityThresholds") { + it("can be used to represent thresholds considered in ascending order") { + val thresholds = SeverityThresholds(low = 0.2D, moderate = 0.4D, severe = 0.6D, critical = 0.8D, ascending = true) + thresholds.severityOf(0.1D) should be(Severity.NONE) + thresholds.severityOf(0.2D) should be(Severity.LOW) + thresholds.severityOf(0.3D) should be(Severity.LOW) + thresholds.severityOf(0.4D) should be(Severity.MODERATE) + thresholds.severityOf(0.5D) should be(Severity.MODERATE) + thresholds.severityOf(0.6D) should be(Severity.SEVERE) + thresholds.severityOf(0.7D) should be(Severity.SEVERE) + thresholds.severityOf(0.8D) should be(Severity.CRITICAL) + thresholds.severityOf(0.9D) should be(Severity.CRITICAL) + } + + it("can be used to represent thresholds considered in descending order") { + val thresholds = SeverityThresholds(low = 0.8D, moderate = 0.6D, severe = 0.4D, critical = 0.2D, ascending = false) + thresholds.severityOf(0.1D) should be(Severity.CRITICAL) + thresholds.severityOf(0.2D) should be(Severity.CRITICAL) + thresholds.severityOf(0.3D) should be(Severity.SEVERE) + thresholds.severityOf(0.4D) should be(Severity.SEVERE) + thresholds.severityOf(0.5D) should be(Severity.MODERATE) + thresholds.severityOf(0.6D) should be(Severity.MODERATE) + thresholds.severityOf(0.7D) should be(Severity.LOW) + thresholds.severityOf(0.8D) should be(Severity.LOW) + thresholds.severityOf(0.9D) should be(Severity.NONE) + } + + it("can be parsed as ascending thresholds from a string that can be processed by Utils.getParam") { + SeverityThresholds.parse("0.2,0.4,0.6,0.8", ascending = true) should be( + Some(SeverityThresholds(low = 0.2D, moderate = 0.4D, severe = 0.6D, critical = 0.8D, ascending = true)) + ) + } + + it("can be parsed as descending thresholds from a string that can be processed by Utils.getParam") { + SeverityThresholds.parse("0.8,0.6,0.4,0.2", ascending = false) should be( + Some(SeverityThresholds(low = 0.8D, moderate = 0.6D, severe = 0.4D, critical = 0.2D, ascending = false)) + ) + } + + it("cannot be created as ascending thresholds with unordered values") { + an[IllegalArgumentException] should be thrownBy( + SeverityThresholds(low = 0.8D, moderate = 0.6D, severe = 0.4D, critical = 0.2D, ascending = true) + ) + } + + it("cannot be created as descending thresholds with unordered values") { + an[IllegalArgumentException] should be thrownBy( + SeverityThresholds(low = 0.2D, moderate = 0.4D, severe = 0.6D, critical = 0.8D, ascending = false) + ) + } + } +} diff --git a/test/com/linkedin/drelephant/analysis/heuristics/MapperDataSkewHeuristicTest.java b/test/com/linkedin/drelephant/analysis/heuristics/MapperDataSkewHeuristicTest.java deleted file mode 100644 index b6016647a..000000000 --- a/test/com/linkedin/drelephant/analysis/heuristics/MapperDataSkewHeuristicTest.java +++ /dev/null @@ -1,75 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.io.IOException; -import java.util.HashMap; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder.CounterName; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import 
com.linkedin.drelephant.hadoop.HadoopTaskData; - -import junit.framework.TestCase; - - -public class MapperDataSkewHeuristicTest extends TestCase { - - private static final long unitSize = Constants.HDFS_BLOCK_SIZE / 64; - Heuristic heuristic = new MapperDataSkewHeuristic(); - - public void testCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(200, 200, 1 * unitSize, 100 * unitSize)); - } - - public void testSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(200, 200, 10 * unitSize, 100 * unitSize)); - } - - public void testModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(200, 200, 20 * unitSize, 100 * unitSize)); - } - - public void testLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(200, 200, 30 * unitSize, 100 * unitSize)); - } - - public void testNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(200, 200, 50 * unitSize, 100 * unitSize)); - } - - public void testSmallFiles() throws IOException { - assertEquals(Severity.NONE, analyzeJob(200, 200, 1 * unitSize, 5 * unitSize)); - } - - public void testSmallTasks() throws IOException { - assertEquals(Severity.NONE, analyzeJob(5, 5, 10 * unitSize, 100 * unitSize)); - } - - private Severity analyzeJob(int numSmallTasks, int numLargeTasks, long smallInputSize, long largeInputSize) - throws IOException { - HadoopCounterHolder jobCounter = new HadoopCounterHolder(null); - HadoopTaskData[] mappers = new HadoopTaskData[numSmallTasks + numLargeTasks]; - - HadoopCounterHolder smallCounter = new HadoopCounterHolder(new HashMap()); - smallCounter.set(HadoopCounterHolder.CounterName.HDFS_BYTES_READ, smallInputSize); - - HadoopCounterHolder largeCounter = new HadoopCounterHolder(new HashMap()); - largeCounter.set(HadoopCounterHolder.CounterName.HDFS_BYTES_READ, largeInputSize); - - int i = 0; - for (; i < numSmallTasks; i++) { - mappers[i] = new HadoopTaskData(smallCounter, new long[4]); - } - for (; i < numSmallTasks + numLargeTasks; i++) { - mappers[i] = new HadoopTaskData(largeCounter, new long[4]); - } - - HadoopJobData data = new HadoopJobData().setCounters(jobCounter).setMapperData(mappers); - HeuristicResult result = heuristic.apply(data); - return result.getSeverity(); - - } -} diff --git a/test/com/linkedin/drelephant/analysis/heuristics/MapperInputSizeHeuristicTest.java b/test/com/linkedin/drelephant/analysis/heuristics/MapperInputSizeHeuristicTest.java deleted file mode 100644 index a0d61f215..000000000 --- a/test/com/linkedin/drelephant/analysis/heuristics/MapperInputSizeHeuristicTest.java +++ /dev/null @@ -1,79 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.io.IOException; -import java.util.HashMap; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder.CounterName; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; - -import junit.framework.TestCase; - - -public class MapperInputSizeHeuristicTest extends TestCase { - - private static final long unitSize = Constants.HDFS_BLOCK_SIZE; - Heuristic heuristic = new MapperInputSizeHeuristic(); - - public void testLargeFileCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(100, 5 * unitSize)); - } - 
- public void testLargeFileSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(200, 5 * unitSize)); - } - - public void testLargeFileModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(500, 5 * unitSize)); - } - - public void testLargeFileLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(1000, 5 * unitSize)); - } - - public void testLargeFileNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(2000, 5 * unitSize)); - } - - public void testSmallFileCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(500, unitSize / 32)); - } - - public void testSmallFileSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(200, unitSize / 32)); - } - - public void testSmallFileModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(50, unitSize / 32)); - } - - public void testSmallFileLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(10, unitSize / 32)); - } - - public void testSmallFileNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(5, unitSize / 32)); - } - - private Severity analyzeJob(int numTasks, long inputSize) throws IOException { - HadoopCounterHolder jobCounter = new HadoopCounterHolder(null); - HadoopTaskData[] mappers = new HadoopTaskData[numTasks]; - - HadoopCounterHolder taskCounter = new HadoopCounterHolder(new HashMap()); - taskCounter.set(HadoopCounterHolder.CounterName.HDFS_BYTES_READ, inputSize); - - int i = 0; - for (; i < numTasks; i++) { - mappers[i] = new HadoopTaskData(taskCounter, new long[4]); - } - - HadoopJobData data = new HadoopJobData().setCounters(jobCounter).setMapperData(mappers); - HeuristicResult result = heuristic.apply(data); - return result.getSeverity(); - } -} diff --git a/test/com/linkedin/drelephant/analysis/heuristics/MapperSpeedHeuristicTest.java b/test/com/linkedin/drelephant/analysis/heuristics/MapperSpeedHeuristicTest.java deleted file mode 100644 index 880b5dd26..000000000 --- a/test/com/linkedin/drelephant/analysis/heuristics/MapperSpeedHeuristicTest.java +++ /dev/null @@ -1,65 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.io.IOException; -import java.util.HashMap; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder.CounterName; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -import junit.framework.TestCase; - - -public class MapperSpeedHeuristicTest extends TestCase { - Heuristic heuristic = new MapperSpeedHeuristic(); - private static final long unitSize = Constants.HDFS_BLOCK_SIZE / 64; - private static final long minute = Statistics.MINUTE; - private static final int numTasks = Constants.SHUFFLE_SORT_MAX_SAMPLE_SIZE; - - public void testCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(120 * minute, 10000 * unitSize)); - } - - public void testSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(120 * minute, 50000 * unitSize)); - } - - public void testModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(120 * minute, 100000 * unitSize)); - } - - public void testLow() throws IOException { 
- assertEquals(Severity.LOW, analyzeJob(120 * minute, 200000 * unitSize)); - } - - public void testNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(120 * minute, 500000 * unitSize)); - } - - public void testShortTask() throws IOException { - assertEquals(Severity.NONE, analyzeJob(2 * minute, 10 * unitSize)); - } - - private Severity analyzeJob(long runtime, long readBytes) throws IOException { - HadoopCounterHolder jobCounter = new HadoopCounterHolder(null); - HadoopTaskData[] mappers = new HadoopTaskData[numTasks]; - - HadoopCounterHolder counter = new HadoopCounterHolder(new HashMap()); - counter.set(HadoopCounterHolder.CounterName.HDFS_BYTES_READ, readBytes); - - int i = 0; - for (; i < numTasks; i++) { - mappers[i] = new HadoopTaskData(counter, new long[]{0,runtime,0,0}); - } - - HadoopJobData data = new HadoopJobData().setCounters(jobCounter).setMapperData(mappers); - HeuristicResult result = heuristic.apply(data); - return result.getSeverity(); - } -} diff --git a/test/com/linkedin/drelephant/analysis/heuristics/ReducerDataSkewHeuristicTest.java b/test/com/linkedin/drelephant/analysis/heuristics/ReducerDataSkewHeuristicTest.java deleted file mode 100644 index 625d9da87..000000000 --- a/test/com/linkedin/drelephant/analysis/heuristics/ReducerDataSkewHeuristicTest.java +++ /dev/null @@ -1,73 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.io.IOException; -import java.util.HashMap; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder.CounterName; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; - -import junit.framework.TestCase; - - -public class ReducerDataSkewHeuristicTest extends TestCase { - private static final long unitSize = Constants.HDFS_BLOCK_SIZE / 64; - Heuristic heuristic = new ReducerDataSkewHeuristic(); - - public void testCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(200, 200, 1 * unitSize, 100 * unitSize)); - } - - public void testSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(200, 200, 10 * unitSize, 100 * unitSize)); - } - - public void testModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(200, 200, 20 * unitSize, 100 * unitSize)); - } - - public void testLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(200, 200, 30 * unitSize, 100 * unitSize)); - } - - public void testNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(200, 200, 50 * unitSize, 100 * unitSize)); - } - - public void testSmallFiles() throws IOException { - assertEquals(Severity.NONE, analyzeJob(200, 200, 1 * unitSize, 5 * unitSize)); - } - - public void testSmallTasks() throws IOException { - assertEquals(Severity.NONE, analyzeJob(5, 5, 10 * unitSize, 100 * unitSize)); - } - - private Severity analyzeJob(int numSmallTasks, int numLargeTasks, long smallInputSize, long largeInputSize) - throws IOException { - HadoopCounterHolder jobCounter = new HadoopCounterHolder(null); - HadoopTaskData[] reducers = new HadoopTaskData[numSmallTasks + numLargeTasks]; - - HadoopCounterHolder smallCounter = new HadoopCounterHolder(new HashMap()); - smallCounter.set(HadoopCounterHolder.CounterName.REDUCE_SHUFFLE_BYTES, 
smallInputSize); - - HadoopCounterHolder largeCounter = new HadoopCounterHolder(new HashMap()); - largeCounter.set(HadoopCounterHolder.CounterName.REDUCE_SHUFFLE_BYTES, largeInputSize); - - int i = 0; - for (; i < numSmallTasks; i++) { - reducers[i] = new HadoopTaskData(smallCounter, new long[4]); - } - for (; i < numSmallTasks + numLargeTasks; i++) { - reducers[i] = new HadoopTaskData(largeCounter, new long[4]); - } - - HadoopJobData data = new HadoopJobData().setCounters(jobCounter).setReducerData(reducers); - HeuristicResult result = heuristic.apply(data); - return result.getSeverity(); - } -} diff --git a/test/com/linkedin/drelephant/analysis/heuristics/ReducerTimeHeuristicTest.java b/test/com/linkedin/drelephant/analysis/heuristics/ReducerTimeHeuristicTest.java deleted file mode 100644 index 916e21334..000000000 --- a/test/com/linkedin/drelephant/analysis/heuristics/ReducerTimeHeuristicTest.java +++ /dev/null @@ -1,73 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.io.IOException; - -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -import junit.framework.TestCase; - - -public class ReducerTimeHeuristicTest extends TestCase { - Heuristic heuristic = new ReducerTimeHeuristic(); - private static final long minute = Statistics.MINUTE;; - - public void testShortRunetimeCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(1 * minute, 500)); - } - - public void testShortRunetimeSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(1 * minute, 200)); - } - - public void testShortRunetimeModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(1 * minute, 50)); - } - - public void testShortRunetimeLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(1 * minute, 10)); - } - - public void testShortRunetimeNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(1 * minute, 2)); - } - - public void testLongRunetimeCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(120 * minute, 10)); - } - - public void testLongRunetimeSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(120 * minute, 20)); - } - - public void testLongRunetimeModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(120 * minute, 50)); - } - - public void testLongRunetimeLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(120 * minute, 100)); - } - - public void testLongRunetimeNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(120 * minute, 200)); - } - - private Severity analyzeJob(long runtime, int numTasks) throws IOException { - HadoopCounterHolder dummyCounter = new HadoopCounterHolder(null); - HadoopTaskData[] reducers = new HadoopTaskData[numTasks]; - - int i = 0; - for (; i < numTasks; i++) { - reducers[i] = new HadoopTaskData(dummyCounter, new long[]{0,runtime,0,0}); - } - - HadoopJobData data = new HadoopJobData().setCounters(dummyCounter).setReducerData(reducers); - HeuristicResult result = heuristic.apply(data); - return result.getSeverity(); - } -} diff --git a/test/com/linkedin/drelephant/analysis/heuristics/ShuffleSortHeuristicTest.java 
b/test/com/linkedin/drelephant/analysis/heuristics/ShuffleSortHeuristicTest.java deleted file mode 100644 index 4c9e880a9..000000000 --- a/test/com/linkedin/drelephant/analysis/heuristics/ShuffleSortHeuristicTest.java +++ /dev/null @@ -1,83 +0,0 @@ -package com.linkedin.drelephant.analysis.heuristics; - -import java.io.IOException; - -import com.linkedin.drelephant.analysis.Constants; -import com.linkedin.drelephant.analysis.Heuristic; -import com.linkedin.drelephant.analysis.HeuristicResult; -import com.linkedin.drelephant.analysis.Severity; -import com.linkedin.drelephant.hadoop.HadoopCounterHolder; -import com.linkedin.drelephant.hadoop.HadoopJobData; -import com.linkedin.drelephant.hadoop.HadoopTaskData; -import com.linkedin.drelephant.math.Statistics; - -import junit.framework.TestCase; - - -public class ShuffleSortHeuristicTest extends TestCase { - Heuristic heuristic = new ShuffleSortHeuristic(); - private static final int numTasks = Constants.SHUFFLE_SORT_MAX_SAMPLE_SIZE; - private static final long minute = Statistics.MINUTE;; - - public void testLongShuffleCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(30 * minute, 0, 5 * minute)); - } - - public void testLongShuffleSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(30 * minute, 0, 10 * minute)); - } - - public void testLongShuffleModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(30 * minute, 0, 20 * minute)); - } - - public void testLongShuffleLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(30 * minute, 0, 40 * minute)); - } - - public void testLongShuffleNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(30 * minute, 0, 80 * minute)); - } - - public void testLongSortCritical() throws IOException { - assertEquals(Severity.CRITICAL, analyzeJob(0, 30 * minute, 5 * minute)); - } - - public void testLongSortSevere() throws IOException { - assertEquals(Severity.SEVERE, analyzeJob(0, 30 * minute, 10 * minute)); - } - - public void testLongSortModerate() throws IOException { - assertEquals(Severity.MODERATE, analyzeJob(0, 30 * minute, 20 * minute)); - } - - public void testLongSortLow() throws IOException { - assertEquals(Severity.LOW, analyzeJob(0, 30 * minute, 40 * minute)); - } - - public void testLongSortNone() throws IOException { - assertEquals(Severity.NONE, analyzeJob(0, 30 * minute, 80 * minute)); - } - - public void testShortShuffle() throws IOException { - assertEquals(Severity.NONE, analyzeJob(minute / 2, 0, minute / 2)); - } - - public void testShortSort() throws IOException { - assertEquals(Severity.NONE, analyzeJob(0, minute / 2, minute / 2)); - } - - private Severity analyzeJob(long shuffleTime, long sortTime, long reduceTime) throws IOException { - HadoopCounterHolder dummyCounter = new HadoopCounterHolder(null); - HadoopTaskData[] reducers = new HadoopTaskData[numTasks]; - - int i = 0; - for (; i < numTasks; i++) { - reducers[i] = new HadoopTaskData(dummyCounter, new long[]{ 0, shuffleTime + sortTime + reduceTime, shuffleTime, sortTime}); - } - HadoopJobData data = new HadoopJobData().setCounters(dummyCounter).setReducerData(reducers); - HeuristicResult result = heuristic.apply(data); - return result.getSeverity(); - } - -} diff --git a/test/com/linkedin/drelephant/configurations/fetcher/FetcherConfigurationTest.java b/test/com/linkedin/drelephant/configurations/fetcher/FetcherConfigurationTest.java new file mode 100644 index 000000000..2db6431c4 --- /dev/null +++ 
b/test/com/linkedin/drelephant/configurations/fetcher/FetcherConfigurationTest.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.configurations.fetcher;
+
+import java.io.IOException;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class FetcherConfigurationTest {
+
+  private static Document document1 = null;
+  private static Document document2 = null;
+  private static Document document3 = null;
+  private static Document document4 = null;
+  private static Document document5 = null;
+
+  private static final String spark = "SPARK";
+  private static final String logDirField = "event_log_dir";
+  private static final String logDirValue = "/custom/configured";
+  private static final String logSizeField = "event_log_size_limit_in_mb";
+  private static final String logSizeValue = "50";
+
+  @BeforeClass
+  public static void runBeforeClass() {
+    try {
+      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder builder = factory.newDocumentBuilder();
+      document1 = builder.parse(
+          FetcherConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/fetcher/FetcherConfTest1.xml"));
+      document2 = builder.parse(
+          FetcherConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/fetcher/FetcherConfTest2.xml"));
+      document3 = builder.parse(
+          FetcherConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/fetcher/FetcherConfTest3.xml"));
+      document4 = builder.parse(
+          FetcherConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/fetcher/FetcherConfTest4.xml"));
+      document5 = builder.parse(
+          FetcherConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/fetcher/FetcherConfTest5.xml"));
+    } catch (ParserConfigurationException e) {
+      throw new RuntimeException("XML Parser could not be created.", e);
+    } catch (SAXException e) {
+      throw new RuntimeException("Test files are not properly formed", e);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to read test files", e);
+    }
+  }
+
+  @Rule
+  public ExpectedException expectedEx = ExpectedException.none();
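  // For reference, document1 above is expected to contain fetcher entries of
  // roughly this shape (illustrative only; the tag names are taken from the
  // error messages asserted in the tests below):
  //
  //   <fetchers>
  //     <fetcher>
  //       <applicationtype>mapreduce</applicationtype>
  //       <classname>com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2</classname>
  //     </fetcher>
  //   </fetchers>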
+  /**
+   * Correctly configured fetcher
+   */
+  @Test
+  public void testParseFetcherConf1() {
+    FetcherConfiguration fetcherConf = new FetcherConfiguration(document1.getDocumentElement());
+    assertEquals(2, fetcherConf.getFetchersConfigurationData().size());
+  }
+
+  /**
+   * No classname field
+   */
+  @Test
+  public void testParseFetcherConf2() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("No tag 'classname' in fetcher 2");
+    new FetcherConfiguration(document2.getDocumentElement());
+  }
+
+  /**
+   * Empty classname field
+   */
+  @Test
+  public void testParseFetcherConf3() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("Empty tag 'classname' in fetcher 1");
+    new FetcherConfiguration(document3.getDocumentElement());
+  }
+
+  /**
+   * No applicationtype tag
+   */
+  @Test
+  public void testParseFetcherConf4() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("No tag or invalid tag 'applicationtype' in fetcher 1"
+        + " classname com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2");
+    new FetcherConfiguration(document4.getDocumentElement());
+  }
+
+  /**
+   * Spark fetcher params: application type, event log directory and event log size limit
+   */
+  @Test
+  public void testParseFetcherConf5() {
+    FetcherConfiguration fetcherConf = new FetcherConfiguration(document5.getDocumentElement());
+    assertEquals(1, fetcherConf.getFetchersConfigurationData().size());
+    assertEquals(spark, fetcherConf.getFetchersConfigurationData().get(0).getAppType().getName());
+    // Exercise the configured params declared as constants above
+    assertEquals(logDirValue, fetcherConf.getFetchersConfigurationData().get(0).getParamMap().get(logDirField));
+    assertEquals(logSizeValue, fetcherConf.getFetchersConfigurationData().get(0).getParamMap().get(logSizeField));
+  }
+
+}
diff --git a/test/com/linkedin/drelephant/configurations/heuristic/HeuristicConfigurationTest.java b/test/com/linkedin/drelephant/configurations/heuristic/HeuristicConfigurationTest.java
new file mode 100644
index 000000000..8eaf34aad
--- /dev/null
+++ b/test/com/linkedin/drelephant/configurations/heuristic/HeuristicConfigurationTest.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.configurations.heuristic;
+
+import java.io.IOException;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class HeuristicConfigurationTest {
+
+  private static Document document1 = null;
+  private static Document document2 = null;
+  private static Document document3 = null;
+  private static Document document4 = null;
+  private static Document document5 = null;
+
+  @BeforeClass
+  public static void runBeforeClass() {
+    try {
+      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder builder = factory.newDocumentBuilder();
+      document1 = builder.parse(HeuristicConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/heuristic/HeuristicConfTest1.xml"));
+      document2 = builder.parse(
+          HeuristicConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/heuristic/HeuristicConfTest2.xml"));
+      document3 = builder.parse(
+          HeuristicConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/heuristic/HeuristicConfTest3.xml"));
+      document4 = builder.parse(
+          HeuristicConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/heuristic/HeuristicConfTest4.xml"));
+      document5 = builder.parse(
+          HeuristicConfigurationTest.class.getClassLoader().getResourceAsStream(
+              "configurations/heuristic/HeuristicConfTest5.xml"));
+    } catch (ParserConfigurationException e) {
+      throw new RuntimeException("XML Parser could not be created.", e);
+    } catch (SAXException e) {
+      throw new RuntimeException("Test files are not properly formed", e);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to read test files", e);
+    }
+  }
+
+  @Rule
+  public ExpectedException expectedEx = ExpectedException.none();
+
+  /**
+   * Correctly configured heuristic
+   */
+  @Test
+  public void testParseHeuristicConf1() {
+    HeuristicConfiguration heuristicConf = new HeuristicConfiguration(document1.getDocumentElement());
+    assertEquals(3, heuristicConf.getHeuristicsConfigurationData().size());
+  }
+
+  /**
+   * No classname tag
+   */
+  @Test
+  public void testParseHeuristicConf2() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("No tag 'classname' in heuristic 1");
+    new HeuristicConfiguration(document2.getDocumentElement());
+  }
+
+  /**
+   * No heuristic name tag
+   */
+  @Test
+  public void testParseHeuristicConf3() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("No tag 'heuristicname' in heuristic 1 classname"
+        + " com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic");
+    new HeuristicConfiguration(document3.getDocumentElement());
+  }
+
+  /**
+   * No view name tag
+   */
+  @Test
+  public void testParseHeuristicConf4() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("No tag 'viewname' in heuristic 1 classname"
+        + " com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic");
+    new HeuristicConfiguration(document4.getDocumentElement());
+  }
+
+  /**
+   * No application type tag
+   */
+  @Test
+  public void testParseHeuristicConf5() {
+    expectedEx.expect(RuntimeException.class);
+    expectedEx.expectMessage("No tag or invalid tag 'applicationtype' in heuristic 2 classname"
+        + " com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic");
+    new HeuristicConfiguration(document5.getDocumentElement());
+  }
+}
+
diff --git a/test/com/linkedin/drelephant/configurations/jobtype/JobTypeConfigurationTest.java b/test/com/linkedin/drelephant/configurations/jobtype/JobTypeConfigurationTest.java
new file mode 100644
index 000000000..a99ebd9f2
--- /dev/null
+++ b/test/com/linkedin/drelephant/configurations/jobtype/JobTypeConfigurationTest.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.configurations.jobtype;
+
+import java.io.IOException;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class JobTypeConfigurationTest {
+
+  private static Document document1 = null;
+  private static Document document2 = null;
+  private static Document document3 = null;
+  private static Document document4 = null;
+  private static Document document5 = null;
+  private static Document document6 = null;
+
+  @BeforeClass
+  public static void runBeforeClass() {
+    try {
+      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder builder = factory.newDocumentBuilder();
+      document1 = builder.parse(JobTypeConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/jobtype/JobTypeConfTest1.xml"));
+      document2 = builder.parse(JobTypeConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/jobtype/JobTypeConfTest2.xml"));
+      document3 = builder.parse(JobTypeConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/jobtype/JobTypeConfTest3.xml"));
+      document4 = builder.parse(JobTypeConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/jobtype/JobTypeConfTest4.xml"));
+      document5 = builder.parse(JobTypeConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/jobtype/JobTypeConfTest5.xml"));
+      document6 = builder.parse(JobTypeConfigurationTest.class.getClassLoader()
+          .getResourceAsStream("configurations/jobtype/JobTypeConfTest6.xml"));
+    } catch (ParserConfigurationException e) {
+      throw new RuntimeException("XML Parser could not be created.", e);
+    } catch (SAXException e) {
+      throw new RuntimeException("Test files are not properly formed", e);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to read test files", e);
+    }
+  }
+
+  @Rule
+  public ExpectedException expectedEx = ExpectedException.none();
configured job type + */ + @Test + public void testParseJobTypeConf1() { + JobTypeConfiguration jobTypeConf = new JobTypeConfiguration(document1.getDocumentElement()); + assertEquals(jobTypeConf.getAppTypeToJobTypeList().size(), 2); + } + + /** + * No jobtype tag + */ + @Test + public void testParseJobTypeConf2() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("No tag 'jobtype' in jobtype 3"); + JobTypeConfiguration jobTypeConf = new JobTypeConfiguration(document2.getDocumentElement()); + } + + /** + * No conf tag + */ + @Test + public void testParseJobTypeConf3() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("No tag 'conf' in jobtype Spark"); + JobTypeConfiguration jobTypeConf = new JobTypeConfiguration(document3.getDocumentElement()); + } + + /** + * No applicationtype tag + */ + @Test + public void testParseJobTypeConf4() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("No tag 'applicationtype' in jobtype Pig"); + JobTypeConfiguration jobTypeConf = new JobTypeConfiguration(document4.getDocumentElement()); + } + + /** + * Wrong pattern for job type + */ + @Test + public void testParseJobTypeConf5() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("Error processing this pattern. Pattern:[(voldemort) jobtype:Voldemort"); + JobTypeConfiguration jobTypeConf = new JobTypeConfiguration(document5.getDocumentElement()); + } + + /** + * Multiple default types + */ + @Test + public void testParseJobTypeConf6() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("Each application type should have one and only one default job type. Duplicate default" + + " job type: jobType:Hive, for application type:mapreduce, isDefault:true, confName:hive.mapred.mode," + + " confValue:.*. for application type: MAPREDUCE"); + JobTypeConfiguration jobTypeConf = new JobTypeConfiguration(document6.getDocumentElement()); + } +} diff --git a/test/com/linkedin/drelephant/configurations/scheduler/SchedulerConfigurationTest.java b/test/com/linkedin/drelephant/configurations/scheduler/SchedulerConfigurationTest.java new file mode 100644 index 000000000..2c86ec561 --- /dev/null +++ b/test/com/linkedin/drelephant/configurations/scheduler/SchedulerConfigurationTest.java @@ -0,0 +1,107 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License.
+ */ + +package com.linkedin.drelephant.configurations.scheduler; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfiguration; +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; + +import java.io.IOException; +import java.util.List; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.w3c.dom.Document; +import org.xml.sax.SAXException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + + +public class SchedulerConfigurationTest { + + private static Document document1; + private static Document document2; + private static Document document3; + + @BeforeClass + public static void runBeforeClass() { + try { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + DocumentBuilder builder = factory.newDocumentBuilder(); + document1 = builder.parse(SchedulerConfigurationTest.class.getClassLoader() + .getResourceAsStream("configurations/scheduler/SchedulerConfTest1.xml")); + document2 = builder.parse(SchedulerConfigurationTest.class.getClassLoader() + .getResourceAsStream("configurations/scheduler/SchedulerConfTest2.xml")); + document3 = builder.parse(SchedulerConfigurationTest.class.getClassLoader() + .getResourceAsStream("configurations/scheduler/SchedulerConfTest3.xml")); + } catch (ParserConfigurationException e) { + throw new RuntimeException("XML Parser could not be created.", e); + } catch (SAXException e) { + throw new RuntimeException("Test files are not properly formed", e); + } catch (IOException e) { + throw new RuntimeException("Unable to read test files ", e); + } + } + + @Rule + public ExpectedException expectedEx = ExpectedException.none(); + + /** + * Correctly configured scheduler + */ + @Test + public void testParseSchedulerConf1() { + SchedulerConfiguration schedulerConf = new SchedulerConfiguration(document1.getDocumentElement()); + List schedulerConfData = schedulerConf.getSchedulerConfigurationData(); + assertEquals(schedulerConfData.size(), 2); + for (SchedulerConfigurationData data : schedulerConfData) { + if (data.getSchedulerName().equals("airflow")) { + assertEquals("com.linkedin.drelephant.schedulers.AirflowScheduler", data.getClassName()); + assertEquals("http://localhost:8000", data.getParamMap().get("airflowbaseurl")); + } else { + assertEquals("azkaban", data.getSchedulerName()); + assertEquals("com.linkedin.drelephant.schedulers.AzkabanScheduler", data.getClassName()); + } + } + } + + /** + * No classname tag + */ + @Test + public void testParseSchedulerConf2() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("No tag 'classname' in scheduler 1"); + new SchedulerConfiguration(document2.getDocumentElement()); + } + + /** + * No name tag + */ + @Test + public void testParseSchedulerConf3() { + expectedEx.expect(RuntimeException.class); + expectedEx.expectMessage("No tag 'name' in scheduler 2 classname com.linkedin.drelephant.schedulers.AzkabanScheduler"); + new SchedulerConfiguration(document3.getDocumentElement()); + } + +} diff --git a/test/com/linkedin/drelephant/exceptions/EventExceptionTest.java b/test/com/linkedin/drelephant/exceptions/EventExceptionTest.java new file mode 100644 index 000000000..e0086e1be --- /dev/null +++ b/test/com/linkedin/drelephant/exceptions/EventExceptionTest.java @@ 
-0,0 +1,57 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +import org.apache.log4j.Logger; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + + +public class EventExceptionTest { + private static final Logger logger = Logger.getLogger(EventExceptionTest.class); + + @Test + public void getMessageTest() { + + String rawEventException = + "java.io.FileNotFoundException: Path is not a file: /data/sample/Sample/Sample/1466675602538-PT-472724050\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1914)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1855)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1835)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1807)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:552)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:362)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat java.security.AccessController.doPrivileged(Native Method)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat javax.security.auth.Subject.doAs(Subject.java:422)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)"; + + EventException eventException = new EventException(0, rawEventException); + assertTrue("getMessageTest failed", eventException.getMessage() + .equals("Path is not a file: /data/sample/Sample/Sample/1466675602538-PT-472724050")); + logger.info("correct message" + eventException.getMessage()); + } +} \ No newline at end of file diff --git a/test/com/linkedin/drelephant/exceptions/MRJobLogAnalyzerTest.java b/test/com/linkedin/drelephant/exceptions/MRJobLogAnalyzerTest.java new file mode 100644 index 000000000..8942c0b6a --- /dev/null +++ b/test/com/linkedin/drelephant/exceptions/MRJobLogAnalyzerTest.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions; + +import org.junit.Test; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertEquals; + +public class MRJobLogAnalyzerTest { + + private String failedMRJobDiagnostic = "Task failed task_1466048666726_979739_r_000000\n" + + "Job failed as tasks failed. failedMaps:0 failedReduces:1"; + + private String killedMRJobDiagnostic = "Kill job job_1466048666726_978316 received from zfu@LINKEDIN.BIZ (auth:TOKEN) at 10.150.4.50\n" + + "Job received Kill while in RUNNING state."; + + private MRJobLogAnalyzer analyzedFailedJobDiagnostic; + private MRJobLogAnalyzer analyzedKilledJobDiagnostic; + + public MRJobLogAnalyzerTest(){ + analyzedFailedJobDiagnostic = new MRJobLogAnalyzer(failedMRJobDiagnostic); + analyzedKilledJobDiagnostic = new MRJobLogAnalyzer(killedMRJobDiagnostic); + } + + @Test + public void getFailedSubEventsTest(){ + assertEquals(analyzedFailedJobDiagnostic.getFailedSubEvents().size(),1 ); + assertTrue(analyzedKilledJobDiagnostic.getFailedSubEvents().isEmpty()); + } + + @Test + public void getExceptionTest(){ + + } +} \ No newline at end of file diff --git a/test/com/linkedin/drelephant/exceptions/azkaban/AzkabanJobLogAnalyzerTest.java b/test/com/linkedin/drelephant/exceptions/azkaban/AzkabanJobLogAnalyzerTest.java new file mode 100644 index 000000000..b26478a95 --- /dev/null +++ b/test/com/linkedin/drelephant/exceptions/azkaban/AzkabanJobLogAnalyzerTest.java @@ -0,0 +1,118 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.exceptions.azkaban; + +import com.linkedin.drelephant.exceptions.JobState; +import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + + +public class AzkabanJobLogAnalyzerTest { + + private String succeededAzkabanJobLog = "24-06-2016 03:12:53 PDT extractCrawlCompanyIndustryClassificationFlow_extractionFinalizer INFO - Starting job sample_sample at 1466763173873\n" + + "24-06-2016 03:12:53 PDT extractCrawlCompanyIndustryClassificationFlow_extractionFinalizer INFO - job JVM args: -Dazkaban.flowid=sampleFlow -Dazkaban.execid=557260 -Dazkaban.jobid=sample_jobr\n" + + "24-06-2016 03:12:55 PDT extractCrawlCompanyIndustryClassificationFlow_extractionFinalizer INFO - Finishing job sample_job attempt: 0 at 1466763175040 with status SUCCEEDED"; + + private String killedAzkabanJobLog = "28-06-2016 16:58:20 PDT feature-exploration_create-index-map INFO - Starting job sample at 1467158300703\n" + + "28-06-2016 17:58:05 PDT feature-exploration_create-index-map ERROR - Kill has been called.\n" + + "28-06-2016 17:58:05 PDT feature-exploration_create-index-map INFO - 16/06/29 00:58:05 INFO util.Utils: Shutdown hook called\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map ERROR - caught error running the job\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map INFO - Token service: sample-localhostrm01.grid.linkedin.com:8032\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map INFO - Cancelling mr job tracker token \n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map ERROR - Job run failed!\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map ERROR - java.lang.RuntimeException: azkaban.jobExecutor.utils.process.ProcessFailureException cause: java.lang.RuntimeException: azkaban.jobExecutor.utils.process.ProcessFailureException\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map INFO - Finishing job feature-exploration_create-index-map attempt: 0 at 1467161886022 with status KILLED\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map INFO - applicationIds to kill: [application_1466048666726_642278]\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map INFO - start klling application: application_1466048666726_642278\n" + + "28-06-2016 17:58:06 PDT feature-exploration_create-index-map INFO - successfully killed application: application_1466048666726_642278"; + + private String mrLevelFailedAzkabanJobLog = "24-06-2016 03:12:19 PDT help_center_sessions INFO - Starting job help_center_sessions at 1466763139993\n" + + "24-06-2016 03:12:25 PDT help_center_sessions INFO - INFO Kind: HDFS_DELEGATION_TOKEN, Service: sample-localhostnn01.grid.linkedin.com:9000, Ident: (HDFS_DELEGATION_TOKEN token 5017233 for username)\n" + + "24-06-2016 03:12:26 PDT help_center_sessions INFO - INFO Submitted application application_1466048666726_410150\n" + + "24-06-2016 03:12:26 PDT help_center_sessions INFO - INFO Running job: job_1466048666726_410150\n" + + "24-06-2016 03:12:33 PDT help_center_sessions INFO - INFO Job job_1466048666726_410150 running in uber mode : false\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - Error: java.io.FileNotFoundException: Path is not a file: /data/databases/sample/Sample/1466675602538-PT-472724050\n" + + 
"24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)\n" + + "24-06-2016 03:12:40 PDT help_center_sessions INFO - \tat org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038\n" + + "24-06-2016 03:13:00 PDT help_center_sessions ERROR - Job run failed!\n" + + "24-06-2016 03:13:00 PDT help_center_sessions ERROR - java.lang.RuntimeException: azkaban.jobExecutor.utils.process.ProcessFailureException cause: java.lang.RuntimeException: azkaban.jobExecutor.utils.process.ProcessFailureException\n" + + "24-06-2016 03:13:00 PDT help_center_sessions INFO - Finishing job help_center_sessions attempt: 0 at 1466763180242 with status FAILED"; + + private String scriptLevelFailedAzkabanJobLog="28-06-2016 16:23:10 PDT job_search_trigger INFO - Starting job job_search_trigger at 1467156190329\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - INFO Last attempt: false\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - Exception in thread \"main\" java.lang.reflect.UndeclaredThrowableException\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - \tat org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1686)\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - \t... 3 more\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - Caused by: java.lang.RuntimeException: Backfill requires start and end date\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - \tat com.linkedin.metrics.feeder.TriggerJob.generateDaily(TriggerJob.java:143)\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - \tat com.linkedin.metrics.feeder.TriggerJob.run(TriggerJob.java:135)\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - \t... 14 more\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - Process completed unsuccessfully in 1 seconds.\n" + + "28-06-2016 16:23:12 PDT job_search_trigger ERROR - Job run failed!\n" + + "28-06-2016 16:23:12 PDT job_search_trigger ERROR - java.lang.RuntimeException: azkaban.jobExecutor.utils.process.ProcessFailureException cause: java.lang.RuntimeException: azkaban.jobExecutor.utils.process.ProcessFailureException\n" + + "28-06-2016 16:23:12 PDT job_search_trigger INFO - Finishing job job_search_trigger attempt: 0 at 1467156192215 with status FAILED"; + + private String azkabanLevelFailedAzkabanJobLog = "28-06-2016 13:45:27 PDT feature-exploration_create-index-map INFO - Starting job feature-exploration_create-index-map at 1467146727699\n" + + "28-06-2016 13:45:27 PDT feature-exploration_create-index-map INFO - job JVM args: -Dazkaban.flowid=feature-exploration -Dazkaban.execid=593197 -Dazkaban.jobid=feature-exploration_create-index-map\n" + + "28-06-2016 13:45:27 PDT feature-exploration_create-index-map INFO - Building spark job executor. 
\n" + + "28-06-2016 13:45:27 PDT feature-exploration_create-index-map ERROR - Failed to build job executor for job feature-exploration_create-index-mapCould not find variable substitution for variable(s) [global.jvm.args->user.to.proxy]\n" + + "28-06-2016 13:45:27 PDT feature-exploration_create-index-map ERROR - Failed to build job type\n" + + "azkaban.jobtype.JobTypeManagerException: Failed to build job executor for job feature-exploration_create-index-map\n" + + "28-06-2016 13:45:27 PDT feature-exploration_create-index-map ERROR - Job run failed preparing the job.\n" + + "28-06-2016 13:45:27 PDT feature-exploration_create-index-map INFO - Finishing job feature-exploration_create-index-map attempt: 0 at 1467146727702 with status FAILED"; + + private AzkabanJobLogAnalyzer analyzedSucceededLog; + private AzkabanJobLogAnalyzer analyzedKilledLog; + private AzkabanJobLogAnalyzer analyzedMRLevelFailedLog; + private AzkabanJobLogAnalyzer analyzedScriptLevelFailedLog; + private AzkabanJobLogAnalyzer analyzedAzkabanLevelFailedLog; + + public AzkabanJobLogAnalyzerTest(){ + analyzedSucceededLog = new AzkabanJobLogAnalyzer(succeededAzkabanJobLog); + analyzedKilledLog = new AzkabanJobLogAnalyzer(killedAzkabanJobLog); + analyzedMRLevelFailedLog = new AzkabanJobLogAnalyzer(mrLevelFailedAzkabanJobLog); + analyzedScriptLevelFailedLog = new AzkabanJobLogAnalyzer(scriptLevelFailedAzkabanJobLog); + analyzedAzkabanLevelFailedLog = new AzkabanJobLogAnalyzer(azkabanLevelFailedAzkabanJobLog); + } + @Test + public void getStateTest(){ + assertTrue(analyzedSucceededLog.getState() == JobState.SUCCEEDED); + assertTrue(analyzedKilledLog.getState() == JobState.KILLED); + assertTrue(analyzedMRLevelFailedLog.getState() == JobState.MRFAIL); + assertTrue(analyzedScriptLevelFailedLog.getState() == JobState.SCRIPTFAIL); + assertTrue(analyzedAzkabanLevelFailedLog.getState() == JobState.SCHEDULERFAIL); + } + + @Test + public void getSubEventsTest(){ + assertTrue("Succeeded sub events test failed",analyzedSucceededLog.getSubEvents().isEmpty()); + assertTrue("Script level failed sub events test failed",analyzedScriptLevelFailedLog.getSubEvents().isEmpty()); + assertTrue("Azkaban level failed sub events test failed",analyzedAzkabanLevelFailedLog.getSubEvents().isEmpty()); + assertTrue(analyzedMRLevelFailedLog.getSubEvents().size() == 1); + assertTrue(analyzedMRLevelFailedLog.getSubEvents().iterator().next().equals("job_1466048666726_410150")); + assertTrue("Killed sub events test failed",analyzedKilledLog.getSubEvents().isEmpty()); + } + + @Test + public void getExceptionsTest(){ + assertTrue(analyzedSucceededLog.getException() == null); + assertTrue(analyzedKilledLog.getException() == null); + } +} \ No newline at end of file diff --git a/test/com/linkedin/drelephant/mapreduce/TestTaskLevelAggregatedMetrics.java b/test/com/linkedin/drelephant/mapreduce/TestTaskLevelAggregatedMetrics.java new file mode 100644 index 000000000..d3c678fe7 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/TestTaskLevelAggregatedMetrics.java @@ -0,0 +1,59 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce; + +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import org.junit.Assert; +import org.junit.Test; + +public class TestTaskLevelAggregatedMetrics { + + @Test + public void testZeroTasks() { + MapReduceTaskData[] taskData = {}; + TaskLevelAggregatedMetrics taskMetrics = new TaskLevelAggregatedMetrics(taskData, 0, 0); + Assert.assertEquals(taskMetrics.getDelay(), 0); + Assert.assertEquals(taskMetrics.getResourceUsed(), 0); + Assert.assertEquals(taskMetrics.getResourceWasted(), 0); + } + + @Test + public void testNullTaskArray() { + TaskLevelAggregatedMetrics taskMetrics = new TaskLevelAggregatedMetrics(null, 0, 0); + Assert.assertEquals(taskMetrics.getDelay(), 0); + Assert.assertEquals(taskMetrics.getResourceUsed(), 0); + Assert.assertEquals(taskMetrics.getResourceWasted(), 0); + } + + @Test + public void testTaskLevelData() { + MapReduceTaskData[] taskData = new MapReduceTaskData[2]; + MapReduceCounterData counterData = new MapReduceCounterData(); + counterData.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, 655577088L); + counterData.set(MapReduceCounterData.CounterName.VIRTUAL_MEMORY_BYTES, 3051589632L); + long[] time = {0, 0, 0, 1464218501117L, 1464218534148L}; + taskData[0] = new MapReduceTaskData("task", "id"); + taskData[0].setTimeAndCounter(time, counterData); + taskData[1] = new MapReduceTaskData("task", "id"); + taskData[1].setTimeAndCounter(new long[5], counterData); + TaskLevelAggregatedMetrics taskMetrics = new TaskLevelAggregatedMetrics(taskData, 4096L, 1463218501117L); + Assert.assertEquals(taskMetrics.getDelay(), 1000000000L); + Assert.assertEquals(taskMetrics.getResourceUsed(), 135168L); + Assert.assertEquals(taskMetrics.getResourceWasted(), 66627L); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFSFetcherHadoop2Test.java b/test/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFSFetcherHadoop2Test.java new file mode 100644 index 000000000..bf020ea45 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFSFetcherHadoop2Test.java @@ -0,0 +1,217 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License.
+ */ + +package com.linkedin.drelephant.mapreduce.fetchers; + +import com.linkedin.drelephant.analysis.AnalyticJob; +import com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.mapreduce.Counters; +import org.apache.hadoop.mapreduce.TaskAttemptID; +import org.apache.hadoop.mapreduce.TaskID; +import org.apache.hadoop.mapreduce.TaskType; +import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.w3c.dom.Document; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.TimeZone; + +public class MapReduceFSFetcherHadoop2Test { + + private static Document document9 = null; + private static Document document10 = null; + private static Document document11 = null; + + @BeforeClass + public static void before() { + try { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + DocumentBuilder builder = factory.newDocumentBuilder(); + ClassLoader classLoader = MapReduceFSFetcherHadoop2Test.class.getClassLoader(); + document9 = builder.parse(classLoader.getResourceAsStream( + "configurations/fetcher/FetcherConfTest9.xml")); + document10 = builder.parse(classLoader.getResourceAsStream( + "configurations/fetcher/FetcherConfTest10.xml")); + document11 = builder.parse(classLoader.getResourceAsStream( + "configurations/fetcher/FetcherConfTest11.xml")); + } catch (ParserConfigurationException e) { + throw new RuntimeException("XML Parser could not be created.", e); + } catch (SAXException e) { + throw new RuntimeException("Test files are not properly formed", e); + } catch (IOException e) { + throw new RuntimeException("Unable to read test files ", e); + } + } + + @Test + public void testFetcherDefaultConfig() { + FetcherConfiguration fetcherConf = new FetcherConfiguration(document9.getDocumentElement()); + try { + MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2( + fetcherConf.getFetchersConfigurationData().get(0)); + Assert.assertFalse("Sampling should be disabled in default", fetcher.isSamplingEnabled()); + Assert.assertEquals(fetcher.DEFALUT_MAX_LOG_SIZE_IN_MB, fetcher.getMaxLogSizeInMB(), 0.0001); + Assert.assertEquals(TimeZone.getDefault(), fetcher.getTimeZone()); + + List list = new ArrayList(); + int listLen = fetcher.MAX_SAMPLE_SIZE * 2; + for (int i = 0; i < listLen; i++) { + list.add(0); + } + Assert.assertEquals("Should not sample task list when sampling is disabled", listLen, + fetcher.sampleAndGetSize("appId", list)); + } catch (IOException e) { + Assert.assertNull("Failed to initialize FileSystem", e); + } + } + + @Test + public void testFetcherConfig() { + FetcherConfiguration fetcherConf = new FetcherConfiguration(document10.getDocumentElement()); + try { + MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2( + fetcherConf.getFetchersConfigurationData().get(0)); + Assert.assertTrue("Failed to enable sampling", fetcher.isSamplingEnabled()); + Assert.assertEquals(200d, fetcher.getMaxLogSizeInMB(), 0.0001); + Assert.assertEquals(TimeZone.getTimeZone("PST"), 
fetcher.getTimeZone()); + + List list = new ArrayList(); + int listLen = fetcher.MAX_SAMPLE_SIZE * 2; + for (int i = 0; i < listLen; i++) { + list.add(0); + } + Assert.assertEquals("Should sample task list when sampling is enabled", fetcher.MAX_SAMPLE_SIZE, + fetcher.sampleAndGetSize("appId", list)); + } catch (IOException e) { + Assert.assertNull("Failed to initialize FileSystem", e); + } + } + + @Test + public void testFetcherEmptyConf() { + FetcherConfiguration fetcherConf = new FetcherConfiguration(document11.getDocumentElement()); + try { + MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2( + fetcherConf.getFetchersConfigurationData().get(0)); + Assert.assertFalse("Sampling should be disabled in default", fetcher.isSamplingEnabled()); + Assert.assertEquals(fetcher.DEFALUT_MAX_LOG_SIZE_IN_MB, fetcher.getMaxLogSizeInMB(), 0.0001); + Assert.assertEquals(TimeZone.getDefault(), fetcher.getTimeZone()); + + List list = new ArrayList(); + int listLen = fetcher.MAX_SAMPLE_SIZE * 2; + for (int i = 0; i < listLen; i++) { + list.add(0); + } + Assert.assertEquals("Should not sample task list when sampling is disabled", listLen, + fetcher.sampleAndGetSize("appId", list)); + } catch (IOException e) { + Assert.assertNull("Failed to initialize FileSystem", e); + } + } + + @Test + public void testGetHistoryDir() { + FetcherConfiguration fetcherConf = new FetcherConfiguration(document9.getDocumentElement()); + try { + MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2( + fetcherConf.getFetchersConfigurationData().get(0)); + Calendar timestamp = Calendar.getInstance(); + timestamp.set(2016, Calendar.JULY, 30); + AnalyticJob job = new AnalyticJob() + .setAppId("application_1461566847127_84624") + .setFinishTime(timestamp.getTimeInMillis()); + + String expected = StringUtils.join(new String[]{fetcher.getHistoryLocation(), "2016", "07", "30", "000084", ""}, File.separator); + Assert.assertEquals("Error history directory", expected, fetcher.getHistoryDir(job)); + } catch (IOException e) { + Assert.assertNull("Failed to initialize FileSystem", e); + } + } + + @Test + public void testGetTaskData() { + FetcherConfiguration fetcherConf = new FetcherConfiguration(document9.getDocumentElement()); + + try { + MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2( + fetcherConf.getFetchersConfigurationData().get(0)); + String jobId = "job_14000_001"; + List infoList = new ArrayList(); + infoList.add(new MockTaskInfo(1, true)); + infoList.add(new MockTaskInfo(2, false)); + + MapReduceTaskData[] taskList = fetcher.getTaskData(jobId, infoList); + Assert.assertNotNull("taskList should not be null.", taskList); + for (MapReduceTaskData task : taskList) { + Assert.assertNotNull("Null pointer in taskList.", task); + } + Assert.assertEquals("Should have only one succeeded task.", 1, taskList.length); + } catch (IOException e) { + Assert.assertNull("Failed to initialize FileSystem.", e); + } + } + + class MockTaskInfo extends JobHistoryParser.TaskInfo { + TaskID taskId; + TaskType taskType; + boolean succeeded; + Counters counters; + long startTime, finishTime; + TaskAttemptID failedDueToAttemptId; + TaskAttemptID successfulAttemptId; + Map attemptsMap; + + public MockTaskInfo(int id, boolean succeeded) { + this.taskId = new TaskID("job1", 1, TaskType.MAP, id); + this.taskType = TaskType.MAP; + this.succeeded = succeeded; + this.counters = new Counters(); + this.finishTime = System.currentTimeMillis(); + this.startTime = finishTime - 10000; + this.failedDueToAttemptId = new 
TaskAttemptID(taskId, 0); + this.successfulAttemptId = new TaskAttemptID(taskId, 1); + this.attemptsMap = new HashMap(); + this.attemptsMap.put(failedDueToAttemptId, new JobHistoryParser.TaskAttemptInfo()); + this.attemptsMap.put(successfulAttemptId, new JobHistoryParser.TaskAttemptInfo()); + } + + public TaskID getTaskId() { return taskId; } + public long getStartTime() { return startTime; } + public long getFinishTime() { return finishTime; } + public Counters getCounters() { return counters; } + public TaskType getTaskType() { return taskType; } + public String getTaskStatus() { return succeeded ? "SUCCEEDED" : "FAILED"; } + public TaskAttemptID getSuccessfulAttemptId() { return successfulAttemptId; } + public TaskAttemptID getFailedDueToAttemptId() { return failedDueToAttemptId; } + public Map getAllTaskAttempts() { + return attemptsMap; + } + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcherHadoop2Test.java b/test/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcherHadoop2Test.java new file mode 100644 index 000000000..94ab6b6a7 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/fetchers/MapReduceFetcherHadoop2Test.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.fetchers; + +import org.junit.Assert; +import org.junit.Test; + + +public class MapReduceFetcherHadoop2Test { + + @Test + public void testDiagnosticMatcher() { + Assert.assertEquals("Task[\\s\\u00A0]+(.*)[\\s\\u00A0]+failed[\\s\\u00A0]+([0-9])[\\s\\u00A0]+times[\\s\\u00A0]+", + ThreadContextMR2.getDiagnosticMatcher("Task task_1443068695259_9143_m_000475 failed 1 time") + .pattern().toString()); + + Assert.assertEquals(2, ThreadContextMR2.getDiagnosticMatcher("Task task_1443068695259_9143_m_000475 failed 1 time") + .groupCount()); + } + +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/DistributedCacheLimitHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/DistributedCacheLimitHeuristicTest.java new file mode 100644 index 000000000..b655a68d3 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/DistributedCacheLimitHeuristicTest.java @@ -0,0 +1,137 @@ +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; + + +/** + * Tests for the DistributedCacheLimitHeuristic class. 
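+ * Each case below seeds mapreduce.job.cache.files / mapreduce.job.cache.archives and the + * matching *.filesizes lists, with the heuristic parameter distributed.cache.file.size.limit + * fixed at 500000000 bytes (500 MB), then asserts the severity returned by apply().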
+ */ +public class DistributedCacheLimitHeuristicTest { + private static Map paramMap = new HashMap(); + private static Properties jobConf = new Properties(); + private static final String cacheFileList = + "/path/to/firstCacheFile,/path/to/secondCacheFile,/path/to/thirdCacheFile"; + private static final String archiveCacheFileList = + "/path/to/firstArchiveCacheFile,/path/to/secondArchiveCacheFile,/path/to/thirdArchiveCacheFile"; + + private static Heuristic _heuristic = new DistributedCacheLimitHeuristic( + new HeuristicConfigurationData("test.heuristic", "test.class", "test.view", new ApplicationType("mapreduce"), + paramMap)); + + @Before + public void setup() { + paramMap.put("distributed.cache.file.size.limit", "500000000"); + jobConf.setProperty("mapreduce.job.cache.files", cacheFileList); + jobConf.setProperty("mapreduce.job.cache.archives", archiveCacheFileList); + } + + /** + * All cache file sizes are within the limit. + */ + @Test + public void testHeuristicResult() { + jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300"); + jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600"); + + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.NONE); + } + + /** + * File sizes are missing for some of the files in the cache. + */ + @Test + public void testHeuristicResultCacheFilesAndSizeLengthMismatch() { + jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200"); + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.MODERATE); + } + + /** + * File sizes are missing for some of the files in the archive cache. + */ + @Test + public void testHeuristicResultArchiveCacheFilesAndSizeLengthMismatch() { + jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300"); + jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500"); + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.MODERATE); + } + + /** + * File size limit exceeded for a file in the cache. + */ + @Test + public void testHeuristicResultCacheFileLimitViolated() { + jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,600000000"); + jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600"); + + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.CRITICAL); + } + + /** + * File size limit exceeded for a file in the archive cache. + */ + @Test + public void testHeuristicResultArchiveCacheFileLimitViolated() { + jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300"); + jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600000000"); + + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.CRITICAL); + } + + /** + * Neither cache is used by the application.
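+ * With no cache entries at all the heuristic is expected to skip the job entirely, + * so apply() should return null rather than a result with Severity.NONE.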
+ */ + @Test + public void testHeuristicResultNoDistributedCacheFiles() { + jobConf.remove("mapreduce.job.cache.files"); + jobConf.remove("mapreduce.job.cache.archives"); + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result == null); + } + + /** + * Cache files are not used by the application. + */ + @Test + public void testHeuristicResultWithEmptyCacheFiles() { + jobConf.remove("mapreduce.job.cache.files"); + jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600"); + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.NONE); + } + + /** + * Archive cache not used by the application. + */ + @Test + public void testHeuristicResultWithEmptyArchiveCacheFiles() { + jobConf.remove("mapreduce.job.cache.archives"); + jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300"); + MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.NONE); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/JobQueueLimitHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/JobQueueLimitHeuristicTest.java new file mode 100644 index 000000000..bd09c8717 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/JobQueueLimitHeuristicTest.java @@ -0,0 +1,91 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import org.junit.Test; + +import junit.framework.TestCase; + +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; + + +public class JobQueueLimitHeuristicTest extends TestCase { + private static Map paramMap = new HashMap(); + private static Heuristic _heuristic = new JobQueueLimitHeuristic(new HeuristicConfigurationData("test_heuristic", "test_class", + "test_view", new ApplicationType("test_apptype"), paramMap)); + + private static final int NUM_TASKS = 100; + + @Test + public void testRuntimeCritical() throws IOException { + assertEquals(Severity.CRITICAL, analyzeJob((long) (14.5 * 60 * 1000), "default")); + } + + public void testRuntimeSevere() throws IOException { + assertEquals(Severity.SEVERE, analyzeJob(14 * 60 * 1000, "default")); + } + + public void testRuntimeModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob((long) (13.5 * 60 * 1000), "default")); + } + + public void testRuntimeLow() throws IOException { + assertEquals(Severity.LOW, analyzeJob(13 * 60 * 1000, "default")); + } + + public void testRuntimeNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(12 * 60 * 1000, "default")); + } + + public void testNonDefaultRuntimeNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(15 * 60 * 1000, "non-default")); + } + + private Severity analyzeJob(long runtimeMs, String queueName) throws IOException { + MapReduceCounterData dummyCounter = new MapReduceCounterData(); + MapReduceTaskData[] mappers = new MapReduceTaskData[2 * NUM_TASKS / 3]; + MapReduceTaskData[] reducers = new MapReduceTaskData[NUM_TASKS / 3]; + Properties jobConf = new Properties(); + jobConf.put("mapred.job.queue.name", queueName); + int i = 0; + for (; i < 2 * NUM_TASKS / 3; i++) { + mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + mappers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, dummyCounter); + } + for (i = 0; i < NUM_TASKS / 3; i++) { + reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + reducers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, dummyCounter); + } + MapReduceApplicationData data = + new MapReduceApplicationData().setCounters(dummyCounter).setReducerData(reducers).setMapperData(mappers) + .setJobConf(jobConf); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/MapperDataSkewHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperDataSkewHeuristicTest.java new file mode 100644 index 000000000..539b2fbcd --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperDataSkewHeuristicTest.java @@ -0,0 +1,98 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.HDFSContext; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; + +import java.util.HashMap; +import java.util.Map; +import junit.framework.TestCase; + + +public class MapperDataSkewHeuristicTest extends TestCase { + + private static final long UNITSIZE = HDFSContext.HDFS_BLOCK_SIZE / 64; //1MB + + private static Map paramsMap = new HashMap(); + private static Heuristic _heuristic = new MapperDataSkewHeuristic(new HeuristicConfigurationData("test_heuristic", + "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap)); + + public void testCritical() throws IOException { + assertEquals(Severity.CRITICAL, analyzeJob(200, 200, 1 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testSevere() throws IOException { + assertEquals(Severity.SEVERE, analyzeJob(200, 200, 10 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob(200, 200, 20 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testLow() throws IOException { + assertEquals(Severity.LOW, analyzeJob(200, 200, 30 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(200, 200, 50 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testSmallFiles() throws IOException { + assertEquals(Severity.NONE, analyzeJob(200, 200, 1 * UNITSIZE, 5 * UNITSIZE)); + } + + public void testSmallTasks() throws IOException { + assertEquals(Severity.NONE, analyzeJob(5, 5, 10 * UNITSIZE, 100 * UNITSIZE)); + } + + private Severity analyzeJob(int numSmallTasks, int numLargeTasks, long smallInputSize, long largeInputSize) + throws IOException { + MapReduceCounterData jobCounter = new MapReduceCounterData(); + MapReduceTaskData[] mappers = new MapReduceTaskData[numSmallTasks + numLargeTasks]; + + MapReduceCounterData smallCounter = new MapReduceCounterData(); + smallCounter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, smallInputSize); + + MapReduceCounterData largeCounter = new MapReduceCounterData(); + largeCounter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, largeInputSize); + + int i = 0; + for (; i < numSmallTasks; i++) { + mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + mappers[i].setTimeAndCounter(new long[5], smallCounter); + } + for (; i < numSmallTasks + numLargeTasks; i++) { + mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + mappers[i].setTimeAndCounter(new long[5], largeCounter); + } + + MapReduceApplicationData data = new 
MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/MapperGCHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperGCHeuristicTest.java new file mode 100644 index 000000000..4a7f8a4e7 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperGCHeuristicTest.java @@ -0,0 +1,81 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import junit.framework.TestCase; + + +public class MapperGCHeuristicTest extends TestCase { + + private static Map paramsMap = new HashMap(); + private static Heuristic _heuristic = new MapperGCHeuristic(new HeuristicConfigurationData("test_heuristic", + "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap)); + + private static int NUMTASKS = 100; + + public void testGCCritical() throws IOException { + assertEquals(Severity.CRITICAL, analyzeJob(1000000, 50000, 2000)); + } + + public void testGCSevere() throws IOException { + assertEquals(Severity.SEVERE, analyzeJob(1000000, 50000, 1500)); + } + + public void testGCModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob(1000000, 50000, 1000)); + } + + public void testGCNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(1000000, 50000, 300)); + } + + public void testShortTasksNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(100000, 50000, 2000)); + } + + + private Severity analyzeJob(long runtimeMs, long cpuMs, long gcMs) throws IOException { + MapReduceCounterData jobCounter = new MapReduceCounterData(); + MapReduceTaskData[] mappers = new MapReduceTaskData[NUMTASKS]; + + MapReduceCounterData counter = new MapReduceCounterData(); + counter.set(MapReduceCounterData.CounterName.CPU_MILLISECONDS, cpuMs); + counter.set(MapReduceCounterData.CounterName.GC_MILLISECONDS, gcMs); + + int i = 0; + for (; i < NUMTASKS; i++) { + mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + mappers[i].setTimeAndCounter(new long[]{runtimeMs, 0 , 0, 0, 0}, counter); + } + + MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers); + HeuristicResult result = _heuristic.apply(data); + 
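+ // Severity here tracks gcMs relative to cpuMs for the given runtime; as testShortTasksNone + // shows, short-running tasks are left at Severity.NONE even when that ratio is high.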
return result.getSeverity(); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/MapperMemoryHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperMemoryHeuristicTest.java new file mode 100644 index 000000000..28a53b6a2 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperMemoryHeuristicTest.java @@ -0,0 +1,92 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.io.FileUtils; + +import junit.framework.TestCase; + + +public class MapperMemoryHeuristicTest extends TestCase { + + private static Map paramsMap = new HashMap(); + private static Heuristic _heuristic = new MapperMemoryHeuristic(new HeuristicConfigurationData("test_heuristic", + "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap)); + + private int NUMTASKS = 100; + + public void testLargeContainerSizeCritical() throws IOException { + assertEquals(Severity.CRITICAL, analyzeJob(2048, 8192)); + } + + public void testLargeContainerSizeSevere() throws IOException { + assertEquals(Severity.SEVERE, analyzeJob(3072, 8192)); + } + + public void testLargeContainerSizeModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob(4096, 8192)); + } + + public void testLargeContainerSizeNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(6144, 8192)); + } + + // If the tasks use the default container size, they should not be flagged + public void testDefaultContainerNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(256, 2048)); + } + + public void testDefaultContainerNoneMore() throws IOException { + assertEquals(Severity.NONE, analyzeJob(1024, 2048)); + } + + private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException { + MapReduceCounterData jobCounter = new MapReduceCounterData(); + MapReduceTaskData[] mappers = new MapReduceTaskData[NUMTASKS]; + + MapReduceCounterData counter = new MapReduceCounterData(); + counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB); + + Properties p = new Properties(); + p.setProperty(MapperMemoryHeuristic.MAPPER_MEMORY_CONF, Long.toString(containerMemMB)); + + int i = 0; + for (; i < NUMTASKS; i++) { + mappers[i] = new MapReduceTaskData("task-id-"+i,
"task-attempt-id-"+i); + mappers[i].setTimeAndCounter(new long[5], counter); + } + + MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers); + data.setJobConf(p); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/MapperSpeedHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperSpeedHeuristicTest.java new file mode 100644 index 000000000..93ea414bb --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperSpeedHeuristicTest.java @@ -0,0 +1,100 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.HDFSContext; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; +import com.linkedin.drelephant.math.Statistics; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; + +import java.util.HashMap; +import java.util.Map; +import junit.framework.TestCase; +import org.apache.commons.io.FileUtils; + + +public class MapperSpeedHeuristicTest extends TestCase { + private static Map paramsMap = new HashMap(); + private static Heuristic _heuristic = new MapperSpeedHeuristic(new HeuristicConfigurationData("test_heuristic", + "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap)); + + private static final long MB_IN_BYTES = FileUtils.ONE_MB; + private static final long MINUTE_IN_MS = Statistics.MINUTE_IN_MS; + private static final int NUMTASKS = 100; + + public void testCritical() throws IOException { + long runtime = 120 * MINUTE_IN_MS; + long speed_factor = (runtime * MB_IN_BYTES) / 1000; + assertEquals(Severity.CRITICAL, analyzeJob(runtime, 1 * speed_factor)); + } + + public void testSevere() throws IOException { + long runtime = 120 * MINUTE_IN_MS; + long speed_factor = (runtime * MB_IN_BYTES) / 1000; + assertEquals(Severity.SEVERE, analyzeJob(runtime, 4 * speed_factor)); + } + + public void testModerate() throws IOException { + long runtime = 120 * MINUTE_IN_MS; + long speed_factor = (runtime * MB_IN_BYTES) / 1000; + assertEquals(Severity.MODERATE, analyzeJob(runtime, 13 * speed_factor)); + } + + public void testLow() throws IOException { + long runtime = 120 * MINUTE_IN_MS; + long speed_factor = (runtime * MB_IN_BYTES) / 1000; + assertEquals(Severity.LOW, analyzeJob(runtime, 50 * speed_factor)); + } + + public void testNone() throws IOException { + long runtime = 120 * MINUTE_IN_MS; + long speed_factor = (runtime * 
MB_IN_BYTES) / 1000;
+    assertEquals(Severity.NONE, analyzeJob(runtime, 51 * speed_factor));
+  }
+
+  public void testShortTask() throws IOException {
+    long runtime = 2 * MINUTE_IN_MS;
+    long speed_factor = (runtime * MB_IN_BYTES) / 1000;
+    assertEquals(Severity.NONE, analyzeJob(runtime, 1 * speed_factor));
+  }
+
+  private Severity analyzeJob(long runtimeMs, long readBytes) throws IOException {
+    MapReduceCounterData jobCounter = new MapReduceCounterData();
+    MapReduceTaskData[] mappers = new MapReduceTaskData[NUMTASKS];
+
+    MapReduceCounterData counter = new MapReduceCounterData();
+    counter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, readBytes);
+
+    int i = 0;
+    for (; i < NUMTASKS; i++) {
+      mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i);
+      mappers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, counter);
+    }
+
+    MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
+    HeuristicResult result = _heuristic.apply(data);
+    return result.getSeverity();
+  }
+}
diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/MapperSpillHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperSpillHeuristicTest.java
new file mode 100644
index 000000000..29a51ea18
--- /dev/null
+++ b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperSpillHeuristicTest.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import junit.framework.TestCase;
+
+
+public class MapperSpillHeuristicTest extends TestCase {
+
+  private static Map paramsMap = new HashMap();
+  private static Heuristic _heuristic = new MapperSpillHeuristic(new HeuristicConfigurationData("test_heuristic",
+      "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap));
+
+  public void testCritical() throws IOException {
+    // Spill ratio 3.0, 1000 tasks
+    assertEquals(Severity.CRITICAL, analyzeJob(3000, 1000, 1000));
+  }
+
+  public void testSevere() throws IOException {
+    // Spill ratio 2.5, 1000 tasks
+    assertEquals(Severity.SEVERE, analyzeJob(2500, 1000, 1000));
+  }
+
+  public void testModerate() throws IOException {
+    // Spill ratio 2.3, 1000 tasks
+    assertEquals(Severity.MODERATE, analyzeJob(2300, 1000, 1000));
+  }
+
+  public void testLow() throws IOException {
+    // Spill ratio 2.1, 1000 tasks
+    assertEquals(Severity.LOW, analyzeJob(2100, 1000, 1000));
+  }
+
+  public void testNone() throws IOException {
+    // Spill ratio 1.0, 1000 tasks
+    assertEquals(Severity.NONE, analyzeJob(1000, 1000, 1000));
+  }
+
+  public void testSmallNumTasks() throws IOException {
+    // Spill ratio 3.0 would be critical, but the number of tasks is small (10), so the final result is NONE
+    assertEquals(Severity.NONE, analyzeJob(3000, 1000, 10));
+  }
+
+  private Severity analyzeJob(long spilledRecords, long mapRecords, int numTasks) throws IOException {
+    MapReduceCounterData jobCounter = new MapReduceCounterData();
+    MapReduceTaskData[] mappers = new MapReduceTaskData[numTasks];
+
+    MapReduceCounterData counter = new MapReduceCounterData();
+    counter.set(MapReduceCounterData.CounterName.SPILLED_RECORDS, spilledRecords);
+    counter.set(MapReduceCounterData.CounterName.MAP_OUTPUT_RECORDS, mapRecords);
+
+    for (int i = 0; i < numTasks; i++) {
+      mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i);
+      mappers[i].setTimeAndCounter(new long[5], counter);
+    }
+
+    MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
+    HeuristicResult result = _heuristic.apply(data);
+    return result.getSeverity();
+  }
+}
diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/MapperTimeHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperTimeHeuristicTest.java
new file mode 100644
index 000000000..79c9000e0
--- /dev/null
+++ b/test/com/linkedin/drelephant/mapreduce/heuristics/MapperTimeHeuristicTest.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import junit.framework.TestCase;
+
+
+public class MapperTimeHeuristicTest extends TestCase {
+
+  private static final long DUMMY_INPUT_SIZE = 0;
+
+  private static Map paramsMap = new HashMap();
+  private static Heuristic _heuristic = new MapperTimeHeuristic(new HeuristicConfigurationData("test_heuristic",
+      "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap));
+
+  // Test batch 1: Large runtime. Heuristic is not affected by the number of tasks
+
+  public void testLongRuntimeTasksCritical() throws IOException {
+    // Should decrease split size and increase the number of tasks
+    assertEquals(Severity.CRITICAL, analyzeJob(10, 120 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testLongRuntimeTasksCriticalMore() throws IOException {
+    // Should decrease split size and increase the number of tasks
+    assertEquals(Severity.CRITICAL, analyzeJob(1000, 120 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testLongRuntimeTasksSevere() throws IOException {
+    // Should decrease split size and increase the number of tasks
+    assertEquals(Severity.SEVERE, analyzeJob(10, 60 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testLongRuntimeTasksSevereMore() throws IOException {
+    // Should decrease split size and increase the number of tasks
+    assertEquals(Severity.SEVERE, analyzeJob(1000, 60 * Statistics.MINUTE_IN_MS));
+  }
+
+  // Test batch 2: Short runtime and varying numbers of tasks
+
+  public void testShortRuntimeTasksCritical() throws IOException {
+    // Should increase split size and decrease the number of tasks
+    assertEquals(Severity.CRITICAL, analyzeJob(1000, 1 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testShortRuntimeTasksSevere() throws IOException {
+    // Should increase split size and decrease the number of tasks
+    assertEquals(Severity.SEVERE, analyzeJob(500, 1 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testShortRuntimeTasksModerate() throws IOException {
+    assertEquals(Severity.MODERATE, analyzeJob(101, 1 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testShortRuntimeTasksLow() throws IOException {
+    assertEquals(Severity.LOW, analyzeJob(50, 1 * Statistics.MINUTE_IN_MS));
+  }
+
+  public void testShortRuntimeTasksNone() throws IOException {
+    // Small file with a small number of tasks and a short runtime. This should be the common case.
+ assertEquals(Severity.NONE, analyzeJob(5, 1 * Statistics.MINUTE_IN_MS)); + } + + private Severity analyzeJob(int numTasks, long runtime) throws IOException { + MapReduceCounterData jobCounter = new MapReduceCounterData(); + MapReduceTaskData[] mappers = new MapReduceTaskData[numTasks]; + + MapReduceCounterData taskCounter = new MapReduceCounterData(); + taskCounter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, DUMMY_INPUT_SIZE); + + int i = 0; + for (; i < numTasks; i++) { + mappers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + mappers[i].setTimeAndCounter(new long[] { runtime, 0, 0, 0, 0 }, taskCounter); + } + + MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerDataSkewHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerDataSkewHeuristicTest.java new file mode 100644 index 000000000..9081813b2 --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerDataSkewHeuristicTest.java @@ -0,0 +1,96 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.HDFSContext; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; + +import java.util.HashMap; +import java.util.Map; +import junit.framework.TestCase; + + +public class ReducerDataSkewHeuristicTest extends TestCase { + private static final long UNITSIZE = HDFSContext.HDFS_BLOCK_SIZE / 64; //1mb + + private static Map paramsMap = new HashMap(); + private static Heuristic _heuristic = new ReducerDataSkewHeuristic(new HeuristicConfigurationData("test_heuristic", + "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap)); + + public void testCritical() throws IOException { + assertEquals(Severity.CRITICAL, analyzeJob(200, 200, 1 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testSevere() throws IOException { + assertEquals(Severity.SEVERE, analyzeJob(200, 200, 10 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob(200, 200, 20 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testLow() throws IOException { + assertEquals(Severity.LOW, analyzeJob(200, 200, 30 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(200, 200, 50 * UNITSIZE, 100 * UNITSIZE)); + } + + public void testSmallFiles() throws IOException { + assertEquals(Severity.NONE, analyzeJob(200, 200, 1 * UNITSIZE, 5 * UNITSIZE)); + } + + public void testSmallTasks() throws IOException { + assertEquals(Severity.NONE, analyzeJob(5, 5, 10 * UNITSIZE, 100 * UNITSIZE)); + } + + private Severity analyzeJob(int numSmallTasks, int numLargeTasks, long smallInputSize, long largeInputSize) + throws IOException { + MapReduceCounterData jobCounter = new MapReduceCounterData(); + MapReduceTaskData[] reducers = new MapReduceTaskData[numSmallTasks + numLargeTasks]; + + MapReduceCounterData smallCounter = new MapReduceCounterData(); + smallCounter.set(MapReduceCounterData.CounterName.REDUCE_SHUFFLE_BYTES, smallInputSize); + + MapReduceCounterData largeCounter = new MapReduceCounterData(); + largeCounter.set(MapReduceCounterData.CounterName.REDUCE_SHUFFLE_BYTES, largeInputSize); + + int i = 0; + for (; i < numSmallTasks; i++) { + reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + reducers[i].setTimeAndCounter(new long[5], smallCounter); + } + for (; i < numSmallTasks + numLargeTasks; i++) { + reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + reducers[i].setTimeAndCounter(new long[5], largeCounter); + } + + MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setReducerData(reducers); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerGCHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerGCHeuristicTest.java new file mode 100644 index 000000000..da8b1df2d --- /dev/null +++ 
b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerGCHeuristicTest.java @@ -0,0 +1,80 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.mapreduce.heuristics; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.Heuristic; +import com.linkedin.drelephant.analysis.HeuristicResult; +import com.linkedin.drelephant.analysis.Severity; +import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData; + +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import junit.framework.TestCase; + + +public class ReducerGCHeuristicTest extends TestCase { + private static Map paramsMap = new HashMap(); + private static Heuristic _heuristic = new ReducerGCHeuristic(new HeuristicConfigurationData("test_heuristic", + "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap)); + + private static int NUMTASKS = 100; + + public void testGCCritical() throws IOException { + assertEquals(Severity.CRITICAL, analyzeJob(1000000, 50000, 2000)); + } + + public void testGCSevere() throws IOException { + assertEquals(Severity.SEVERE, analyzeJob(1000000, 50000, 1500)); + } + + public void testGCModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob(1000000, 50000, 1000)); + } + + public void testGCNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(1000000, 50000, 300)); + } + + public void testShortTasksNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(100000, 50000, 2000)); + } + + + private Severity analyzeJob(long runtimeMs, long cpuMs, long gcMs) throws IOException { + MapReduceCounterData jobCounter = new MapReduceCounterData(); + MapReduceTaskData[] reducers = new MapReduceTaskData[NUMTASKS]; + + MapReduceCounterData counter = new MapReduceCounterData(); + counter.set(MapReduceCounterData.CounterName.CPU_MILLISECONDS, cpuMs); + counter.set(MapReduceCounterData.CounterName.GC_MILLISECONDS, gcMs); + + int i = 0; + for (; i < NUMTASKS; i++) { + reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + reducers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, counter); + } + + MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setReducerData(reducers); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + } +} diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerMemoryHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerMemoryHeuristicTest.java new file mode 100644 index 000000000..9e33557ca --- /dev/null +++ b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerMemoryHeuristicTest.java @@ -0,0 +1,91 @@ +/* + * 
Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+
+import junit.framework.TestCase;
+
+
+public class ReducerMemoryHeuristicTest extends TestCase {
+  private static Map paramsMap = new HashMap();
+  private static Heuristic _heuristic = new ReducerMemoryHeuristic(new HeuristicConfigurationData("test_heuristic",
+      "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap));
+
+  private int NUMTASKS = 100;
+
+  public void testLargeContainerSizeCritical() throws IOException {
+    assertEquals(Severity.CRITICAL, analyzeJob(2048, 8192));
+  }
+
+  public void testLargeContainerSizeSevere() throws IOException {
+    assertEquals(Severity.SEVERE, analyzeJob(3072, 8192));
+  }
+
+  public void testLargeContainerSizeModerate() throws IOException {
+    assertEquals(Severity.MODERATE, analyzeJob(4096, 8192));
+  }
+
+  public void testLargeContainerSizeNone() throws IOException {
+    assertEquals(Severity.NONE, analyzeJob(6144, 8192));
+  }
+
+  // If the task uses the default container size, it should not be flagged
+  public void testDefaultContainerNone() throws IOException {
+    assertEquals(Severity.NONE, analyzeJob(256, 2048));
+  }
+
+  public void testDefaultContainerNoneMore() throws IOException {
+    assertEquals(Severity.NONE, analyzeJob(1024, 2048));
+  }
+
+  private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
+    MapReduceCounterData jobCounter = new MapReduceCounterData();
+    MapReduceTaskData[] reducers = new MapReduceTaskData[NUMTASKS];
+
+    MapReduceCounterData counter = new MapReduceCounterData();
+    counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);
+
+    Properties p = new Properties();
+    p.setProperty(ReducerMemoryHeuristic.REDUCER_MEMORY_CONF, Long.toString(containerMemMB));
+
+    int i = 0;
+    for (; i < NUMTASKS; i++) {
+      reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i);
+      reducers[i].setTimeAndCounter(new long[5], counter);
+    }
+
+    MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setReducerData(reducers);
+    data.setJobConf(p);
+    HeuristicResult result = _heuristic.apply(data);
+    return result.getSeverity();
+  }
+}
diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerTimeHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerTimeHeuristicTest.java
new file mode 100644
index 000000000..c4929baf9
--- /dev/null
+++ b/test/com/linkedin/drelephant/mapreduce/heuristics/ReducerTimeHeuristicTest.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import java.io.IOException;
+
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+
+import java.util.HashMap;
+import java.util.Map;
+import junit.framework.TestCase;
+
+
+public class ReducerTimeHeuristicTest extends TestCase {
+
+  private static Map paramsMap = new HashMap();
+  private static Heuristic _heuristic = new ReducerTimeHeuristic(new HeuristicConfigurationData("test_heuristic",
+      "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap));
+
+  private static final long MINUTE_IN_MS = Statistics.MINUTE_IN_MS;
+
+  public void testShortRuntimeCritical() throws IOException {
+    assertEquals(Severity.CRITICAL, analyzeJob(1 * MINUTE_IN_MS, 1000));
+  }
+
+  public void testShortRuntimeSevere() throws IOException {
+    assertEquals(Severity.SEVERE, analyzeJob(1 * MINUTE_IN_MS, 500));
+  }
+
+  public void testShortRuntimeModerate() throws IOException {
+    assertEquals(Severity.MODERATE, analyzeJob(1 * MINUTE_IN_MS, 101));
+  }
+
+  public void testShortRuntimeLow() throws IOException {
+    assertEquals(Severity.LOW, analyzeJob(1 * MINUTE_IN_MS, 50));
+  }
+
+  public void testShortRuntimeNone() throws IOException {
+    assertEquals(Severity.NONE, analyzeJob(1 * MINUTE_IN_MS, 2));
+  }
+
+  public void testLongRuntimeCritical() throws IOException {
+    assertEquals(Severity.CRITICAL, analyzeJob(120 * MINUTE_IN_MS, 10));
+  }
+
+  // Long runtime severity is not affected by the number of tasks
+  public void testLongRuntimeCriticalMore() throws IOException {
+    assertEquals(Severity.CRITICAL, analyzeJob(120 * MINUTE_IN_MS, 1000));
+  }
+
+  public void testLongRuntimeSevere() throws IOException {
+    assertEquals(Severity.SEVERE, analyzeJob(60 * MINUTE_IN_MS, 10));
+  }
+
+  public void testLongRuntimeSevereMore() throws IOException {
+    assertEquals(Severity.SEVERE, analyzeJob(60 * MINUTE_IN_MS, 1000));
+  }
+
+  private Severity analyzeJob(long runtimeMs, int numTasks) throws IOException {
+    MapReduceCounterData dummyCounter = new MapReduceCounterData();
+    MapReduceTaskData[] reducers = new MapReduceTaskData[numTasks];
+
+    int i = 0;
+    for (; i < numTasks; i++) {
+      reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i);
+      reducers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, dummyCounter);
+    }
+
+    MapReduceApplicationData data = new MapReduceApplicationData().setCounters(dummyCounter).setReducerData(reducers);
+    HeuristicResult result = _heuristic.apply(data);
+    return result.getSeverity();
+  }
+}
diff --git a/test/com/linkedin/drelephant/mapreduce/heuristics/ShuffleSortHeuristicTest.java b/test/com/linkedin/drelephant/mapreduce/heuristics/ShuffleSortHeuristicTest.java
new file mode 100644
index 000000000..284719a35
--- /dev/null
+++ b/test/com/linkedin/drelephant/mapreduce/heuristics/ShuffleSortHeuristicTest.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.linkedin.drelephant.mapreduce.heuristics;
+
+import com.linkedin.drelephant.analysis.ApplicationType;
+import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
+import java.io.IOException;
+
+import com.linkedin.drelephant.analysis.Heuristic;
+import com.linkedin.drelephant.analysis.HeuristicResult;
+import com.linkedin.drelephant.analysis.Severity;
+import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
+import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
+import com.linkedin.drelephant.math.Statistics;
+
+import java.util.HashMap;
+import java.util.Map;
+import junit.framework.TestCase;
+
+
+public class ShuffleSortHeuristicTest extends TestCase {
+
+  private static Map paramsMap = new HashMap();
+  private static Heuristic _heuristic = new ShuffleSortHeuristic(new HeuristicConfigurationData("test_heuristic",
+      "test_class", "test_view", new ApplicationType("test_apptype"), paramsMap));
+
+  private static final int NUMTASKS = 100;
+  private static final long MINUTE_IN_MS = Statistics.MINUTE_IN_MS;
+
+  public void testLongShuffleCritical() throws IOException {
+    assertEquals(Severity.CRITICAL, analyzeJob(30 * MINUTE_IN_MS, 0, 5 * MINUTE_IN_MS));
+  }
+
+  public void testLongShuffleSevere() throws IOException {
+    assertEquals(Severity.SEVERE, analyzeJob(30 * MINUTE_IN_MS, 0, 10 * MINUTE_IN_MS));
+  }
+
+  public void testLongShuffleModerate() throws IOException {
+    assertEquals(Severity.MODERATE, analyzeJob(30 * MINUTE_IN_MS, 0, 20 * MINUTE_IN_MS));
+  }
+
+  public void testLongShuffleLow() throws IOException {
+    assertEquals(Severity.LOW, analyzeJob(30 * MINUTE_IN_MS, 0, 40 * MINUTE_IN_MS));
+  }
+
+  public void testLongShuffleNone() throws IOException {
+    assertEquals(Severity.NONE, analyzeJob(30 * MINUTE_IN_MS, 0, 80 * MINUTE_IN_MS));
+  }
+
+  public void testLongSortCritical() throws IOException {
+    assertEquals(Severity.CRITICAL, analyzeJob(0, 30 * MINUTE_IN_MS, 5 * MINUTE_IN_MS));
+  }
+
+  public void testLongSortSevere() throws IOException {
+    assertEquals(Severity.SEVERE,
analyzeJob(0, 30 * MINUTE_IN_MS, 10 * MINUTE_IN_MS)); + } + + public void testLongSortModerate() throws IOException { + assertEquals(Severity.MODERATE, analyzeJob(0, 30 * MINUTE_IN_MS, 20 * MINUTE_IN_MS)); + } + + public void testLongSortLow() throws IOException { + assertEquals(Severity.LOW, analyzeJob(0, 30 * MINUTE_IN_MS, 40 * MINUTE_IN_MS)); + } + + public void testLongSortNone() throws IOException { + assertEquals(Severity.NONE, analyzeJob(0, 30 * MINUTE_IN_MS, 80 * MINUTE_IN_MS)); + } + + public void testShortShuffle() throws IOException { + assertEquals(Severity.NONE, analyzeJob(MINUTE_IN_MS / 2, 0, MINUTE_IN_MS / 2)); + } + + public void testShortSort() throws IOException { + assertEquals(Severity.NONE, analyzeJob(0, MINUTE_IN_MS / 2, MINUTE_IN_MS / 2)); + } + + private Severity analyzeJob(long shuffleTimeMs, long sortTimeMs, long reduceTimeMs) throws IOException { + MapReduceCounterData dummyCounter = new MapReduceCounterData(); + MapReduceTaskData[] reducers = new MapReduceTaskData[NUMTASKS]; + + int i = 0; + for (; i < NUMTASKS; i++) { + reducers[i] = new MapReduceTaskData("task-id-"+i, "task-attempt-id-"+i); + reducers[i].setTimeAndCounter( + new long[] { shuffleTimeMs + sortTimeMs + reduceTimeMs, shuffleTimeMs, sortTimeMs, 0, 0}, dummyCounter); + } + MapReduceApplicationData data = new MapReduceApplicationData().setCounters(dummyCounter).setReducerData(reducers); + HeuristicResult result = _heuristic.apply(data); + return result.getSeverity(); + } + +} diff --git a/test/com/linkedin/drelephant/math/StatisticsTest.java b/test/com/linkedin/drelephant/math/StatisticsTest.java new file mode 100644 index 000000000..9c28bf56b --- /dev/null +++ b/test/com/linkedin/drelephant/math/StatisticsTest.java @@ -0,0 +1,116 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+package com.linkedin.drelephant.math;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class StatisticsTest {
+
+  @Test
+  public void testAverage1() {
+    assertEquals(6, Statistics.average(new long[]{2, 4, 6, 8, 10}));
+    assertEquals(0, Statistics.average(new long[] {}));
+  }
+
+  @Test
+  public void testAverage2() {
+    ArrayList list1 = new ArrayList();
+    list1.add(2L);
+    list1.add(4L);
+    list1.add(6L);
+    list1.add(8L);
+    list1.add(10L);
+    assertEquals(6, Statistics.average(list1));
+
+    ArrayList list2 = new ArrayList();
+    assertEquals(0, Statistics.average(list2));
+  }
+
+  @Rule
+  public ExpectedException expectedEx = ExpectedException.none();
+
+  @Test
+  public void testMedian1() {
+    ArrayList list1 = new ArrayList();
+    expectedEx.expect(IllegalArgumentException.class);
+    expectedEx.expectMessage("Median of an empty list is not defined.");
+    Statistics.median(list1);
+  }
+
+  @Test
+  public void testMedian2() {
+    ArrayList list2 = new ArrayList();
+    list2.add(2L);
+    list2.add(4L);
+    list2.add(6L);
+    list2.add(8L);
+    assertEquals(5, Statistics.median(list2));
+
+    list2.add(15L);
+    assertEquals(6, Statistics.median(list2));
+  }
+
+  @Test
+  public void testDescribeFactor() {
+    assertEquals("", Statistics.describeFactor(0, 0, "test"));
+    assertEquals("(5.00test)", Statistics.describeFactor(10, 2, "test"));
+  }
+
+  @Test
+  public void testReadableTimespan() {
+    assertEquals("0 sec", Statistics.readableTimespan(0));
+    assertEquals("1 sec", Statistics.readableTimespan(1000));
+    assertEquals("1 min", Statistics.readableTimespan(60000));
+    assertEquals("1 hr", Statistics.readableTimespan(3600000));
+  }
+
+  @Test
+  public void testPercentile() {
+    List finishTimes = new ArrayList();
+    for (int i = 1; i <= 10; i++) {
+      finishTimes.add((long) i * 10);
+    }
+    assertEquals(100, Statistics.percentile(finishTimes, 100));
+    assertEquals(50, Statistics.percentile(finishTimes, 50));
+    assertEquals(0, Statistics.percentile(finishTimes, 0));
+    assertEquals(10, Statistics.percentile(finishTimes, 10));
+    assertEquals(10, Statistics.percentile(finishTimes, 4));
+
+    List oddLengthValues = new ArrayList();
+    oddLengthValues.add(1L);
+    oddLengthValues.add(2L);
+    oddLengthValues.add(3L);
+    oddLengthValues.add(4L);
+    oddLengthValues.add(5L);
+    assertEquals(3L, Statistics.percentile(oddLengthValues, 50));
+
+    List finishTimeSingle = new ArrayList();
+    finishTimeSingle.add(10L);
+    assertEquals(10, Statistics.percentile(finishTimeSingle, 100));
+    assertEquals(0, Statistics.percentile(finishTimeSingle, 0));
+    assertEquals(10, Statistics.percentile(finishTimeSingle, 10));
+    assertEquals(10, Statistics.percentile(finishTimeSingle, 50));
+  }
+}
diff --git a/test/com/linkedin/drelephant/schedulers/AirflowSchedulerTest.java b/test/com/linkedin/drelephant/schedulers/AirflowSchedulerTest.java
new file mode 100644
index 000000000..780e730e4
--- /dev/null
+++ b/test/com/linkedin/drelephant/schedulers/AirflowSchedulerTest.java
@@ -0,0 +1,114 @@
+package com.linkedin.drelephant.schedulers;
+
+import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import org.junit.Test;
+
+import static com.linkedin.drelephant.schedulers.AirflowScheduler.AIRFLOW_DAG_ID;
+import static com.linkedin.drelephant.schedulers.AirflowScheduler.AIRFLOW_DAG_RUN_EXECUTION_DATE;
+import static
com.linkedin.drelephant.schedulers.AirflowScheduler.AIRFLOW_TASK_ID; +import static com.linkedin.drelephant.schedulers.AirflowScheduler.AIRFLOW_TASK_INSTANCE_EXECUTION_DATE; + +import static org.junit.Assert.assertEquals; + + +public class AirflowSchedulerTest { + + @Test + public void testAirflowLoadInfoWithCompleteConf() { + + AirflowScheduler airScheduler = new AirflowScheduler("id", getAirflowProperties(), getSchedulerConfData()); + + assertEquals("http://localhost:1717/admin/airflow/graph?dag_id=dag_id", airScheduler.getFlowDefUrl()); + assertEquals("dag_id", airScheduler.getFlowDefId()); + assertEquals("http://localhost:1717/admin/airflow/graph?dag_id=dag_id&execution_date=dag_run_execution_date", airScheduler.getFlowExecUrl()); + assertEquals("dag_id/dag_run_execution_date", airScheduler.getFlowExecId()); + + assertEquals("http://localhost:1717/admin/airflow/code?dag_id=dag_id&task_id=task_id", airScheduler.getJobDefUrl()); + assertEquals("dag_id/task_id", airScheduler.getJobDefId()); + assertEquals("http://localhost:1717/admin/airflow/log?dag_id=dag_id&task_id=task_id&execution_date=task_instance_execution_date", airScheduler.getJobExecUrl()); + assertEquals("dag_id/dag_run_execution_date/task_id/task_instance_execution_date", airScheduler.getJobExecId()); + + assertEquals("task_id", airScheduler.getJobName()); + assertEquals(0, airScheduler.getWorkflowDepth()); + assertEquals("airflow", airScheduler.getSchedulerName()); + + } + + @Test + public void testAirflowLoadInfoWithMissingProperty() { + + AirflowScheduler airScheduler = new AirflowScheduler("id", getPropertiesAndRemove(AIRFLOW_TASK_ID), getSchedulerConfData()); + + assertEquals("http://localhost:1717/admin/airflow/graph?dag_id=dag_id", airScheduler.getFlowDefUrl()); + assertEquals("dag_id", airScheduler.getFlowDefId()); + assertEquals("http://localhost:1717/admin/airflow/graph?dag_id=dag_id&execution_date=dag_run_execution_date", airScheduler.getFlowExecUrl()); + assertEquals("dag_id/dag_run_execution_date", airScheduler.getFlowExecId()); + + assertEquals(null, airScheduler.getJobDefUrl()); + assertEquals(null, airScheduler.getJobDefId()); + assertEquals(null, airScheduler.getJobExecUrl()); + assertEquals(null, airScheduler.getJobExecId()); + + assertEquals(null, airScheduler.getJobName()); + assertEquals(0, airScheduler.getWorkflowDepth()); + assertEquals("airflow", airScheduler.getSchedulerName()); + } + + @Test + public void testAirflowLoadInfoWithNullProperty() { + + AirflowScheduler airScheduler = new AirflowScheduler("id", null, getSchedulerConfData()); + + assertEquals(null, airScheduler.getFlowDefUrl()); + assertEquals(null, airScheduler.getFlowDefId()); + assertEquals(null, airScheduler.getFlowExecId()); + assertEquals(null, airScheduler.getFlowExecUrl()); + + assertEquals(null, airScheduler.getJobDefId()); + assertEquals(null, airScheduler.getJobDefUrl()); + assertEquals(null, airScheduler.getJobExecId()); + assertEquals(null, airScheduler.getJobExecUrl()); + + assertEquals(null, airScheduler.getJobName()); + assertEquals(0, airScheduler.getWorkflowDepth()); + assertEquals("airflow", airScheduler.getSchedulerName()); + } + + @Test + public void testAirflowLoadsNameFromConfData() { + + AirflowScheduler airScheduler = new AirflowScheduler("id", null, getSchedulerConfData("othername")); + assertEquals("othername", airScheduler.getSchedulerName()); + + } + + private static Properties getAirflowProperties() { + Properties properties = new Properties(); + properties.put(AIRFLOW_DAG_ID, "dag_id"); + 
properties.put(AIRFLOW_DAG_RUN_EXECUTION_DATE, "dag_run_execution_date"); + properties.put(AIRFLOW_TASK_ID, "task_id"); + properties.put(AIRFLOW_TASK_INSTANCE_EXECUTION_DATE, "task_instance_execution_date"); + + return properties; + } + + private static Properties getPropertiesAndRemove(String key) { + Properties properties = getAirflowProperties(); + properties.remove(key); + return properties; + } + + private static SchedulerConfigurationData getSchedulerConfData() { + return getSchedulerConfData("airflow"); + } + + private static SchedulerConfigurationData getSchedulerConfData(String name) { + Map paramMap = new HashMap(); + paramMap.put("airflowbaseurl", "http://localhost:1717"); + return new SchedulerConfigurationData(name, null, paramMap); + } +} diff --git a/test/com/linkedin/drelephant/schedulers/AzkabanSchedulerTest.java b/test/com/linkedin/drelephant/schedulers/AzkabanSchedulerTest.java new file mode 100644 index 000000000..8ef888f1f --- /dev/null +++ b/test/com/linkedin/drelephant/schedulers/AzkabanSchedulerTest.java @@ -0,0 +1,110 @@ +package com.linkedin.drelephant.schedulers; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; + +import java.util.Properties; +import org.junit.Test; + +import static com.linkedin.drelephant.schedulers.AzkabanScheduler.AZKABAN_JOB_URL; +import static com.linkedin.drelephant.schedulers.AzkabanScheduler.AZKABAN_ATTEMPT_URL; +import static com.linkedin.drelephant.schedulers.AzkabanScheduler.AZKABAN_EXECUTION_URL; +import static com.linkedin.drelephant.schedulers.AzkabanScheduler.AZKABAN_WORKFLOW_URL; +import static com.linkedin.drelephant.schedulers.AzkabanScheduler.AZKABAN_JOB_NAME; +import static org.junit.Assert.assertEquals; + + +public class AzkabanSchedulerTest { + + @Test + public void testAzkabanLoadInfoWithCompleteConf() { + + AzkabanScheduler azkScheduler = new AzkabanScheduler("id", getAzkabanProperties(), getSchedulerConfData()); + + assertEquals("https://host:9000/manager?project=project-name&flow=flow-name", azkScheduler.getFlowDefUrl()); + assertEquals("https://host:9000/manager?project=project-name&flow=flow-name", azkScheduler.getFlowDefId()); + assertEquals("https://host:9000/executor?execid=123456", azkScheduler.getFlowExecId()); + assertEquals("https://host:9000/executor?execid=123456", azkScheduler.getFlowExecUrl()); + + assertEquals("https://host:9000/manager?project=project-name&flow=flow-name&job=job-name", azkScheduler.getJobDefId()); + assertEquals("https://host:9000/manager?project=project-name&flow=flow-name&job=job-name", azkScheduler.getJobDefUrl()); + assertEquals("https://host:9000/executor?execid=123456&job=job-name&attempt=0", azkScheduler.getJobExecId()); + assertEquals("https://host:9000/executor?execid=123456&job=job-name&attempt=0", azkScheduler.getJobExecUrl()); + + assertEquals("job-name", azkScheduler.getJobName()); + assertEquals(0, azkScheduler.getWorkflowDepth()); + assertEquals("azkaban", azkScheduler.getSchedulerName()); + } + + @Test + public void testAzkabanLoadInfoWithMissingProperty() { + + AzkabanScheduler azkScheduler = new AzkabanScheduler("id", getPropertiesAndRemove(AZKABAN_JOB_URL), getSchedulerConfData()); + + assertEquals("https://host:9000/manager?project=project-name&flow=flow-name", azkScheduler.getFlowDefUrl()); + assertEquals("https://host:9000/manager?project=project-name&flow=flow-name", azkScheduler.getFlowDefId()); + assertEquals("https://host:9000/executor?execid=123456", azkScheduler.getFlowExecId()); + 
assertEquals("https://host:9000/executor?execid=123456", azkScheduler.getFlowExecUrl()); + + assertEquals(null, azkScheduler.getJobDefId()); + assertEquals(null, azkScheduler.getJobDefUrl()); + assertEquals("https://host:9000/executor?execid=123456&job=job-name&attempt=0", azkScheduler.getJobExecId()); + assertEquals("https://host:9000/executor?execid=123456&job=job-name&attempt=0", azkScheduler.getJobExecUrl()); + + assertEquals("job-name", azkScheduler.getJobName()); + assertEquals(0, azkScheduler.getWorkflowDepth()); + assertEquals("azkaban", azkScheduler.getSchedulerName()); + } + + @Test + public void testAzkabanLoadInfoWithNullProperty() { + + AzkabanScheduler azkScheduler = new AzkabanScheduler("id", null, getSchedulerConfData()); + + assertEquals(null, azkScheduler.getFlowDefUrl()); + assertEquals(null, azkScheduler.getFlowDefId()); + assertEquals(null, azkScheduler.getFlowExecId()); + assertEquals(null, azkScheduler.getFlowExecUrl()); + + assertEquals(null, azkScheduler.getJobDefId()); + assertEquals(null, azkScheduler.getJobDefUrl()); + assertEquals(null, azkScheduler.getJobExecId()); + assertEquals(null, azkScheduler.getJobExecUrl()); + + assertEquals(null, azkScheduler.getJobName()); + assertEquals(0, azkScheduler.getWorkflowDepth()); + assertEquals("azkaban", azkScheduler.getSchedulerName()); + } + + @Test + public void testAzkabanLoadsNameFromConfData() { + + AzkabanScheduler azkScheduler = new AzkabanScheduler("id", null, getSchedulerConfData("othername")); + assertEquals("othername", azkScheduler.getSchedulerName()); + + } + + private static Properties getAzkabanProperties() { + Properties properties = new Properties(); + properties.put(AZKABAN_JOB_URL, "https://host:9000/manager?project=project-name&flow=flow-name&job=job-name"); + properties.put(AZKABAN_ATTEMPT_URL, "https://host:9000/executor?execid=123456&job=job-name&attempt=0"); + properties.put(AZKABAN_WORKFLOW_URL, "https://host:9000/manager?project=project-name&flow=flow-name"); + properties.put(AZKABAN_EXECUTION_URL, "https://host:9000/executor?execid=123456"); + properties.put(AZKABAN_JOB_NAME, "job-name"); + + return properties; + } + + private static Properties getPropertiesAndRemove(String key) { + Properties properties = getAzkabanProperties(); + properties.remove(key); + return properties; + } + + private static SchedulerConfigurationData getSchedulerConfData() { + return getSchedulerConfData("azkaban"); + } + + private static SchedulerConfigurationData getSchedulerConfData(String name) { + return new SchedulerConfigurationData(name, null, null); + } +} diff --git a/test/com/linkedin/drelephant/schedulers/OozieSchedulerTest.java b/test/com/linkedin/drelephant/schedulers/OozieSchedulerTest.java new file mode 100644 index 000000000..e2c800287 --- /dev/null +++ b/test/com/linkedin/drelephant/schedulers/OozieSchedulerTest.java @@ -0,0 +1,319 @@ +package com.linkedin.drelephant.schedulers; + +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; +import org.apache.oozie.client.*; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.*; + 
+@RunWith(MockitoJUnitRunner.class)
+public class OozieSchedulerTest {
+  private static final String parentJobInfo = "0143705-160828184536493-oozie-oozi-W";
+  private static final String jobAppName = "some-workflow-name";
+  private static final String parentJobAppName = jobAppName + "parent";
+  private static final String jobInfo = "0004167-160629080632562-oozie-oozi-W";
+  private static final String actionName = "some-action";
+  private static final String actionInfo = jobInfo + "@" + actionName;
+  private static final String oozieUrl = "http://localhost:11000/oozie?job=";
+  private static final String coordinatorJobInfo = "0163255-160828184536493-oozie-oozi-C";
+  private static final String coordinatorActionInfo = coordinatorJobInfo + "@1537";
+  private static final String coordinatorName = "some-coordinator";
+  private static final String applicationUrl = "http://localhost:8088/proxy/application_1478790851061_4847/";
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Mock
+  private OozieClient oozieClient;
+
+  @Mock
+  private WorkflowJob workflowJob;
+
+  @Mock
+  private OozieClient manualCommittedJobClient;
+
+  @Mock
+  private OozieClient scheduledJobClient;
+
+  @Mock
+  private WorkflowAction manualChildAction;
+
+  @Mock
+  private WorkflowJob manualChildJob;
+
+  @Mock
+  private WorkflowJob manualParentJob;
+
+  @Mock
+  private WorkflowAction scheduledChildAction;
+
+  @Mock
+  private WorkflowJob scheduledChildJob;
+
+  @Mock
+  private CoordinatorJob oozieCoordinatorJobInfo;
+
+  private static Properties getNonOozieProperties() {
+    return new Properties();
+  }
+
+  private static Properties getOozieProperties() {
+    Properties properties = new Properties();
+
+    properties.put("oozie.action.id", actionInfo);
+    properties.put("oozie.job.id", jobInfo);
+
+    return properties;
+  }
+
+  private static SchedulerConfigurationData makeConfig(String name, Map params) {
+    return new SchedulerConfigurationData(name, OozieScheduler.class.getName(), params);
+  }
+
+  private static Map getDefaultSchedulerParams() {
+    Map paramMap = new HashMap();
+
+    paramMap.put("oozie_api_url", "http://oozie.api/");
+    paramMap.put("oozie_job_url_template", "http://oozie/search?workflow=%s");
+    paramMap.put("oozie_job_exec_url_template", "http://oozie/workflows/%s");
+    paramMap.put("oozie_workflow_url_template", "http://oozie/search?workflow=%s");
+    paramMap.put("oozie_workflow_exec_url_template", "http://oozie/workflows/%s");
+
+    return paramMap;
+  }
+
+  private static Map getSchedulerConfigWithout(String...
keys) { + Map params = getDefaultSchedulerParams(); + + for (String key : keys) { + params.remove(key); + } + + return params; + } + + @Before + public void setUp() throws OozieClientException { + when(workflowJob.getAppName()).thenReturn(jobAppName); + when(workflowJob.getId()).thenReturn(jobInfo); + when(oozieClient.getJobInfo(eq(jobInfo))).thenReturn(workflowJob); + + //Manual committed job + when(manualChildAction.getConsoleUrl()).thenReturn("-"); + when(manualCommittedJobClient.getWorkflowActionInfo(actionInfo)).thenReturn(manualChildAction); + + when(manualChildJob.getId()).thenReturn(jobInfo); + when(manualChildJob.getAppName()).thenReturn(jobAppName); + when(manualChildJob.getParentId()).thenReturn(parentJobInfo); + when(manualChildJob.getConsoleUrl()).thenReturn(oozieUrl + jobInfo); + when(manualCommittedJobClient.getJobInfo(jobInfo)).thenReturn(manualChildJob); + + when(manualParentJob.getId()).thenReturn(parentJobInfo); + when(manualParentJob.getAppName()).thenReturn(parentJobAppName); + when(manualParentJob.getParentId()).thenReturn(null); + when(manualParentJob.getConsoleUrl()).thenReturn(oozieUrl + parentJobInfo); + when(manualCommittedJobClient.getJobInfo(parentJobInfo)).thenReturn(manualParentJob); + + //Oozie coordinated job + when(scheduledChildAction.getConsoleUrl()).thenReturn(applicationUrl); + when(scheduledJobClient.getWorkflowActionInfo(actionInfo)).thenReturn(scheduledChildAction); + + when(scheduledChildJob.getId()).thenReturn(jobInfo); + when(scheduledChildJob.getAppName()).thenReturn(jobAppName); + when(scheduledChildJob.getParentId()).thenReturn(coordinatorActionInfo); + when(scheduledChildJob.getConsoleUrl()).thenReturn(oozieUrl + jobInfo); + when(scheduledJobClient.getJobInfo(jobInfo)).thenReturn(scheduledChildJob); + + when(oozieCoordinatorJobInfo.getConsoleUrl()).thenReturn(null); + when(oozieCoordinatorJobInfo.getAppName()).thenReturn(coordinatorName); + when(scheduledJobClient.getCoordJobInfo(coordinatorJobInfo)).thenReturn(oozieCoordinatorJobInfo); + } + + /* + Job Reference ID: Oozie Job ID + Job Execution ID: Oozie Job ID + Flow Reference ID: Super Parent Oozie job ID + Flow Execution ID: Super Parent Oozie job ID + */ + @Test + public void testManualCommittedJob() throws Exception { + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", new HashMap()); + OozieScheduler scheduler = new OozieScheduler("id", getOozieProperties(), schedulerConfig, manualCommittedJobClient); + + assertEquals(jobInfo, scheduler.getJobDefId()); + assertEquals(jobInfo, scheduler.getJobExecId()); + assertEquals(parentJobInfo, scheduler.getFlowDefId()); + assertEquals(parentJobInfo, scheduler.getFlowExecId()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobDefUrl()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobExecUrl()); + assertEquals(oozieUrl + parentJobInfo, scheduler.getFlowDefUrl()); + assertEquals(oozieUrl + parentJobInfo, scheduler.getFlowExecUrl()); + assertEquals(1, scheduler.getWorkflowDepth()); + assertEquals(jobInfo, scheduler.getJobName()); + assertEquals("oozie", scheduler.getSchedulerName()); + } + + /* + Job Reference ID: Job AppName-ActionName + Job Execution ID: Oozie Job ID + Flow Reference ID: Super Parent Job AppName + Flow Execution ID: Super Parent Oozie job ID + */ + @Test + public void testManualCommittedJobAppNameUnique() throws Exception { + Map params = new HashMap(); + params.put("oozie_app_name_uniqueness", "true"); + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", params); + OozieScheduler scheduler = new 
OozieScheduler("id", getOozieProperties(), schedulerConfig, manualCommittedJobClient); + + assertEquals(oozieUrl + jobInfo, scheduler.getJobDefUrl()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobExecUrl()); + assertEquals(oozieUrl + parentJobInfo, scheduler.getFlowDefUrl()); + assertEquals(oozieUrl + parentJobInfo, scheduler.getFlowExecUrl()); + assertEquals(1, scheduler.getWorkflowDepth()); + assertEquals(jobAppName + "-" + actionName, scheduler.getJobName()); + assertEquals("oozie", scheduler.getSchedulerName()); + } + + /* + Job Reference ID: C_ID-ActionName-Depth + Job Execution ID: Oozie Job ID + Flow Reference ID: Coordinator ID = C_ID + Flow Execution ID: Coordinator Action ID = C_ID@1 + */ + @Test + public void testOozieScheduledJob() throws Exception { + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", new HashMap()); + OozieScheduler scheduler = new OozieScheduler("id", getOozieProperties(), schedulerConfig, scheduledJobClient); + + assertEquals(coordinatorJobInfo + "-" + actionName + "-0", scheduler.getJobDefId()); + assertEquals(jobInfo, scheduler.getJobExecId()); + assertEquals(coordinatorJobInfo, scheduler.getFlowDefId()); + assertEquals(coordinatorActionInfo, scheduler.getFlowExecId()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobDefUrl()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobExecUrl()); + assertEquals(coordinatorJobInfo, scheduler.getFlowDefUrl()); + assertEquals(coordinatorActionInfo, scheduler.getFlowExecUrl()); + assertEquals(0, scheduler.getWorkflowDepth()); + assertEquals(coordinatorJobInfo + "-" + actionName + "-0", scheduler.getJobName()); + assertEquals("oozie", scheduler.getSchedulerName()); + } + + /* + Job Reference ID: Job AppName-ActionName + Job Execution ID: Oozie Job ID + Flow Reference ID: Coordinator Job name + Flow Execution ID: Coordinator Action ID = C_ID@1 + */ + @Test + public void tesOozieScheduledJobAppNameUnique() throws Exception { + HashMap params = new HashMap(); + params.put("oozie_app_name_uniqueness", "true"); + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", params); + OozieScheduler scheduler = new OozieScheduler("id", getOozieProperties(), schedulerConfig, scheduledJobClient); + + assertEquals(jobAppName + "-" + actionName, scheduler.getJobDefId()); + assertEquals(jobInfo, scheduler.getJobExecId()); + assertEquals(coordinatorName, scheduler.getFlowDefId()); + assertEquals(coordinatorActionInfo, scheduler.getFlowExecId()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobDefUrl()); + assertEquals(oozieUrl + jobInfo, scheduler.getJobExecUrl()); + assertEquals(coordinatorJobInfo, scheduler.getFlowDefUrl()); + assertEquals(coordinatorActionInfo, scheduler.getFlowExecUrl()); + assertEquals(0, scheduler.getWorkflowDepth()); + assertEquals(jobAppName + "-" + actionName, scheduler.getJobName()); + assertEquals("oozie", scheduler.getSchedulerName()); + } + + @Test + public void testUserGivenTemplateArePreferredUrl() throws Exception { + Map params = getDefaultSchedulerParams(); + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", params); + OozieScheduler scheduler = new OozieScheduler("id", getOozieProperties(), schedulerConfig, manualCommittedJobClient); + + assertEquals("http://oozie/search?workflow=" + jobInfo, scheduler.getJobDefUrl()); + assertEquals("http://oozie/workflows/" + jobInfo, scheduler.getJobExecUrl()); + assertEquals("http://oozie/search?workflow=" + parentJobInfo, scheduler.getFlowDefUrl()); + assertEquals("http://oozie/workflows/" + parentJobInfo, 
scheduler.getFlowExecUrl()); + } + + @Test + public void testDepthCalculation() throws Exception { + when(workflowJob.getParentId()).thenReturn(jobInfo, jobInfo, jobInfo, null); + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", new HashMap()); + OozieScheduler scheduler = new OozieScheduler("id", getOozieProperties(), schedulerConfig, oozieClient); + + assertEquals(1, scheduler.getWorkflowDepth()); + + } + + @Test + public void testOozieLoadInfoWithOozieClientException() throws Exception { + thrown.expect(RuntimeException.class); + thrown.expectMessage("Failed fetching Oozie workflow " + jobInfo + " info"); + + doThrow(new OozieClientException("500 Internal server error", "BOOM")).when(oozieClient).getJobInfo(anyString()); + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", getDefaultSchedulerParams()); + new OozieScheduler("id", getOozieProperties(), schedulerConfig, oozieClient); + } + + @Test + public void testOozieLoadInfoWithMissingProperty() { + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", getDefaultSchedulerParams()); + OozieScheduler scheduler = new OozieScheduler("id", getNonOozieProperties(), schedulerConfig); + + assertEquals(null, scheduler.getFlowDefUrl()); + assertEquals(null, scheduler.getFlowDefId()); + assertEquals(null, scheduler.getFlowExecUrl()); + assertEquals(null, scheduler.getFlowExecId()); + + assertEquals(null, scheduler.getJobDefUrl()); + assertEquals(null, scheduler.getJobDefId()); + assertEquals(null, scheduler.getJobExecUrl()); + assertEquals(null, scheduler.getJobExecId()); + + assertEquals(null, scheduler.getJobName()); + assertEquals(0, scheduler.getWorkflowDepth()); + assertEquals("oozie", scheduler.getSchedulerName()); + } + + @Test + public void testOozieLoadInfoWithNullProperty() { + SchedulerConfigurationData schedulerConfig = makeConfig("oozie", getDefaultSchedulerParams()); + OozieScheduler scheduler = new OozieScheduler("id", null, schedulerConfig); + + assertEquals(null, scheduler.getFlowDefUrl()); + assertEquals(null, scheduler.getFlowDefId()); + assertEquals(null, scheduler.getFlowExecId()); + assertEquals(null, scheduler.getFlowExecUrl()); + + assertEquals(null, scheduler.getJobDefId()); + assertEquals(null, scheduler.getJobDefUrl()); + assertEquals(null, scheduler.getJobExecId()); + assertEquals(null, scheduler.getJobExecUrl()); + + assertEquals(null, scheduler.getJobName()); + assertEquals(0, scheduler.getWorkflowDepth()); + assertEquals("oozie", scheduler.getSchedulerName()); + } + + @Test + public void testOozieLoadsNameFromConfData() { + SchedulerConfigurationData schedulerConfig = makeConfig("othername", getDefaultSchedulerParams()); + OozieScheduler scheduler = new OozieScheduler("id", null, schedulerConfig); + assertEquals("othername", scheduler.getSchedulerName()); + } +} diff --git a/test/com/linkedin/drelephant/spark/SparkMetricsAggregatorTest.scala b/test/com/linkedin/drelephant/spark/SparkMetricsAggregatorTest.scala new file mode 100644 index 000000000..954000fa0 --- /dev/null +++ b/test/com/linkedin/drelephant/spark/SparkMetricsAggregatorTest.scala @@ -0,0 +1,174 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark + +import java.util.Date + +import scala.collection.JavaConverters + +import com.linkedin.drelephant.analysis.ApplicationType +import com.linkedin.drelephant.configurations.aggregator.AggregatorConfigurationData +import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData} +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfo, ApplicationInfo, ExecutorSummary} +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.scalatest.{FunSpec, Matchers} + +class SparkMetricsAggregatorTest extends FunSpec with Matchers { + import SparkMetricsAggregatorTest._ + + describe("SparkMetricsAggregator") { + val aggregatorConfigurationData = newFakeAggregatorConfigurationData( + Map("allocated_memory_waste_buffer_percentage" -> "0.5") + ) + + val appId = "application_1" + + val applicationInfo = { + val applicationAttemptInfo = { + val now = System.currentTimeMillis + val duration = 8000000L + newFakeApplicationAttemptInfo(Some("1"), startTime = new Date(now - duration), endTime = new Date(now)) + } + new ApplicationInfo(appId, name = "app", Seq(applicationAttemptInfo)) + } + + val restDerivedData = { + val executorSummaries = Seq( + newFakeExecutorSummary(id = "1", totalDuration = 1000000L), + newFakeExecutorSummary(id = "2", totalDuration = 3000000L) + ) + SparkRestDerivedData( + applicationInfo, + jobDatas = Seq.empty, + stageDatas = Seq.empty, + executorSummaries + ) + } + + describe("when it has log-derived data") { + val logDerivedData = { + val environmentUpdate = newFakeSparkListenerEnvironmentUpdate( + Map( + "spark.serializer" -> "org.apache.spark.serializer.KryoSerializer", + "spark.storage.memoryFraction" -> "0.3", + "spark.driver.memory" -> "2G", + "spark.executor.instances" -> "2", + "spark.executor.memory" -> "4g", + "spark.shuffle.memoryFraction" -> "0.5" + ) + ) + SparkLogDerivedData(environmentUpdate) + } + + val data = SparkApplicationData(appId, restDerivedData, Some(logDerivedData)) + + val aggregator = new SparkMetricsAggregator(aggregatorConfigurationData) + aggregator.aggregate(data) + + val result = aggregator.getResult + + it("calculates resources used") { + // 2 executors (spark.executor.instances) at 4096 MB (spark.executor.memory = 4g) over the 8000 s application duration + val totalExecutorMemoryMb = 2 * 4096 + val applicationDurationSeconds = 8000 + result.getResourceUsed should be(totalExecutorMemoryMb * applicationDurationSeconds) + } + + it("calculates resources wasted") { + val totalExecutorMemoryMb = 2 * 4096 + val applicationDurationSeconds = 8000 + val resourceAllocated = totalExecutorMemoryMb * applicationDurationSeconds + + val executorMemoryMb = 4096 + val totalExecutorTaskTimeSeconds = 1000 + 3000 + val resourceUsed = executorMemoryMb * totalExecutorTaskTimeSeconds + + // wasted = allocated - used * (1 + waste buffer), with the 0.5 buffer configured at the top of this describe block + result.getResourceWasted should be(resourceAllocated - resourceUsed * 1.5) + } + + it("doesn't calculate total delay") { + result.getTotalDelay should be(0L) + } + } + + describe("when it doesn't have log-derived data") { + val data = SparkApplicationData(appId, restDerivedData, 
logDerivedData = None) + + val aggregator = new SparkMetricsAggregator(aggregatorConfigurationData) + aggregator.aggregate(data) + + val result = aggregator.getResult + + it("doesn't calculate resources used") { + result.getResourceUsed should be(0L) + } + + it("doesn't calculate resources wasted") { + result.getResourceWasted should be(0L) + } + + it("doesn't calculate total delay") { + result.getTotalDelay should be(0L) + } + } + } +} + +object SparkMetricsAggregatorTest { + import JavaConverters._ + + def newFakeAggregatorConfigurationData(params: Map[String, String] = Map.empty): AggregatorConfigurationData = + new AggregatorConfigurationData("org.apache.spark.SparkMetricsAggregator", new ApplicationType("SPARK"), params.asJava) + + def newFakeSparkListenerEnvironmentUpdate(appConfigurationProperties: Map[String, String]): SparkListenerEnvironmentUpdate = + SparkListenerEnvironmentUpdate(Map("Spark Properties" -> appConfigurationProperties.toSeq)) + + def newFakeApplicationAttemptInfo( + attemptId: Option[String], + startTime: Date, + endTime: Date + ): ApplicationAttemptInfo = new ApplicationAttemptInfo( + attemptId, + startTime, + endTime, + sparkUser = "foo", + completed = true + ) + + def newFakeExecutorSummary( + id: String, + totalDuration: Long + ): ExecutorSummary = new ExecutorSummary( + id, + hostPort = "", + rddBlocks = 0, + memoryUsed = 0, + diskUsed = 0, + activeTasks = 0, + failedTasks = 0, + completedTasks = 0, + totalTasks = 0, + totalDuration, + totalInputBytes = 0, + totalShuffleRead = 0, + totalShuffleWrite = 0, + maxMemory = 0, + executorLogs = Map.empty + ) +} diff --git a/test/com/linkedin/drelephant/spark/data/SparkApplicationDataTest.scala b/test/com/linkedin/drelephant/spark/data/SparkApplicationDataTest.scala new file mode 100644 index 000000000..5cd686bb5 --- /dev/null +++ b/test/com/linkedin/drelephant/spark/data/SparkApplicationDataTest.scala @@ -0,0 +1,82 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.data + +import java.util.Date + +import scala.collection.JavaConverters + +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfo, ApplicationInfo} +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.scalatest.{FunSpec, Matchers} + +class SparkApplicationDataTest extends FunSpec with Matchers { + import SparkApplicationDataTest._ + import JavaConverters._ + + describe("SparkApplicationData") { + val appId = "application_1" + val attemptId = Some("1") + + val applicationAttemptInfo = { + val now = System.currentTimeMillis + val duration = 8000000L + newFakeApplicationAttemptInfo(attemptId, startTime = new Date(now - duration), endTime = new Date(now)) + } + + val restDerivedData = SparkRestDerivedData( + new ApplicationInfo(appId, "app", Seq(applicationAttemptInfo)), + jobDatas = Seq.empty, + stageDatas = Seq.empty, + executorSummaries = Seq.empty + ) + + val configurationProperties = Map( + "spark.serializer" -> "org.apache.spark.serializer.KryoSerializer", + "spark.storage.memoryFraction" -> "0.3", + "spark.driver.memory" -> "2G", + "spark.executor.instances" -> "900", + "spark.executor.memory" -> "1g", + "spark.shuffle.memoryFraction" -> "0.5" + ) + + val logDerivedData = SparkLogDerivedData( + SparkListenerEnvironmentUpdate(Map("Spark Properties" -> configurationProperties.toSeq)) + ) + + describe(".getConf") { + it("returns the Spark properties") { + val data = SparkApplicationData(appId, restDerivedData, Some(logDerivedData)) + data.getConf.asScala should contain theSameElementsAs(configurationProperties) + } + } + } +} + +object SparkApplicationDataTest { + def newFakeApplicationAttemptInfo( + attemptId: Option[String], + startTime: Date, + endTime: Date + ): ApplicationAttemptInfo = new ApplicationAttemptInfo( + attemptId, + startTime, + endTime, + sparkUser = "foo", + completed = true + ) +} diff --git a/test/com/linkedin/drelephant/spark/fetchers/SparkFetcherTest.scala b/test/com/linkedin/drelephant/spark/fetchers/SparkFetcherTest.scala new file mode 100644 index 000000000..7d37168a6 --- /dev/null +++ b/test/com/linkedin/drelephant/spark/fetchers/SparkFetcherTest.scala @@ -0,0 +1,215 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.fetchers + +import java.io.{File, FileOutputStream, InputStream, OutputStream} +import java.util.Date + +import scala.collection.JavaConverters +import scala.concurrent.{ExecutionContext, Future} + +import com.google.common.io.Files +import com.linkedin.drelephant.analysis.{AnalyticJob, ApplicationType} +import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData +import com.linkedin.drelephant.spark.data.{SparkLogDerivedData, SparkRestDerivedData} +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfo, ApplicationInfo} +import com.linkedin.drelephant.util.SparkUtils +import org.apache.spark.SparkConf +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.mockito.Mockito +import org.scalatest.{FunSpec, Matchers} + +class SparkFetcherTest extends FunSpec with Matchers { + import SparkFetcherTest._ + + describe("SparkFetcher") { + import ExecutionContext.Implicits.global + + val fetcherConfigurationData = newFakeFetcherConfigurationData() + + val appId = "application_1" + + val t2 = System.currentTimeMillis + val t1 = t2 - 1 + val duration = 8000000L + + val restDerivedData = SparkRestDerivedData( + new ApplicationInfo( + appId, + "app", + Seq( + newFakeApplicationAttemptInfo(Some("2"), startTime = new Date(t2 - duration), endTime = new Date(t2)), + newFakeApplicationAttemptInfo(Some("1"), startTime = new Date(t1 - duration), endTime = new Date(t1)) + ) + ), + jobDatas = Seq.empty, + stageDatas = Seq.empty, + executorSummaries = Seq.empty + ) + + val logDerivedData = SparkLogDerivedData(SparkListenerEnvironmentUpdate(Map.empty)) + + val analyticJob = new AnalyticJob().setAppId(appId) + + it("returns data") { + val sparkFetcher = new SparkFetcher(fetcherConfigurationData) { + override lazy val sparkConf = new SparkConf() + override lazy val sparkRestClient = newFakeSparkRestClient(appId, Future(restDerivedData)) + override lazy val sparkLogClient = Some(newFakeSparkLogClient(appId, Some("2"), Future(logDerivedData))) + } + val data = sparkFetcher.fetchData(analyticJob) + data.appId should be(appId) + } + + it("throws an exception if the REST client fails") { + val sparkFetcher = new SparkFetcher(fetcherConfigurationData) { + override lazy val sparkConf = new SparkConf() + override lazy val sparkRestClient = newFakeSparkRestClient(appId, Future { throw new Exception() }) + override lazy val sparkLogClient = Some(newFakeSparkLogClient(appId, Some("2"), Future(logDerivedData))) + } + + an[Exception] should be thrownBy { sparkFetcher.fetchData(analyticJob) } + } + + it("throws an exception if the log client fails") { + val sparkFetcher = new SparkFetcher(fetcherConfigurationData) { + override lazy val sparkConf = new SparkConf() + override lazy val sparkRestClient = newFakeSparkRestClient(appId, Future(restDerivedData)) + override lazy val sparkLogClient = Some(newFakeSparkLogClient(appId, Some("2"), Future { throw new Exception() })) + } + + an[Exception] should be thrownBy { sparkFetcher.fetchData(analyticJob) } + } + + it("gets its SparkConf when SPARK_CONF_DIR is set") { + val tempDir = Files.createTempDir() + + val testResourceIn = getClass.getClassLoader.getResourceAsStream("spark-defaults.conf") + val testResourceFile = new File(tempDir, "spark-defaults.conf") + val testResourceOut = new FileOutputStream(testResourceFile) + managedCopyInputStreamToOutputStream(testResourceIn, testResourceOut) + + val fetcherConfigurationData = newFakeFetcherConfigurationData() + val 
sparkFetcher = new SparkFetcher(fetcherConfigurationData) { + override lazy val sparkUtils = new SparkUtils() { + override val defaultEnv = Map("SPARK_CONF_DIR" -> tempDir.toString) + } + } + val sparkConf = sparkFetcher.sparkConf + + tempDir.delete() + + sparkConf.get("spark.yarn.historyServer.address") should be("jh1.grid.example.com:18080") + sparkConf.get("spark.eventLog.enabled") should be("true") + sparkConf.get("spark.eventLog.compress") should be("true") + sparkConf.get("spark.eventLog.dir") should be("hdfs://nn1.grid.example.com:9000/logs/spark") + } + + it("gets its SparkConf when SPARK_HOME is set") { + val tempDir = Files.createTempDir() + val tempConfDir = new File(tempDir, "conf") + tempConfDir.mkdir() + + val testResourceIn = getClass.getClassLoader.getResourceAsStream("spark-defaults.conf") + val testResourceFile = new File(tempConfDir, "spark-defaults.conf") + val testResourceOut = new FileOutputStream(testResourceFile) + managedCopyInputStreamToOutputStream(testResourceIn, testResourceOut) + + val fetcherConfigurationData = newFakeFetcherConfigurationData() + val sparkFetcher = new SparkFetcher(fetcherConfigurationData) { + override lazy val sparkUtils = new SparkUtils() { + override val defaultEnv = Map("SPARK_HOME" -> tempDir.toString) + } + } + val sparkConf = sparkFetcher.sparkConf + + tempDir.delete() + + sparkConf.get("spark.yarn.historyServer.address") should be("jh1.grid.example.com:18080") + sparkConf.get("spark.eventLog.enabled") should be("true") + sparkConf.get("spark.eventLog.compress") should be("true") + sparkConf.get("spark.eventLog.dir") should be("hdfs://nn1.grid.example.com:9000/logs/spark") + } + + it("throws an exception if neither SPARK_CONF_DIR nor SPARK_HOME are set") { + val fetcherConfigurationData = newFakeFetcherConfigurationData() + val sparkFetcher = new SparkFetcher(fetcherConfigurationData) { + override lazy val sparkUtils = new SparkUtils() { override val defaultEnv = Map.empty[String, String] } + } + an[IllegalStateException] should be thrownBy { sparkFetcher.sparkConf } + } + } +} + +object SparkFetcherTest { + import JavaConverters._ + + def newFakeFetcherConfigurationData(): FetcherConfigurationData = + new FetcherConfigurationData(classOf[SparkFetcher].getName, new ApplicationType("SPARK"), Map.empty.asJava) + + def newFakeApplicationAttemptInfo( + attemptId: Option[String], + startTime: Date, + endTime: Date + ): ApplicationAttemptInfo = new ApplicationAttemptInfo( + attemptId, + startTime, + endTime, + sparkUser = "foo", + completed = true + ) + + def newFakeSparkRestClient( + appId: String, + restDerivedData: Future[SparkRestDerivedData] + )( + implicit ec: ExecutionContext + ): SparkRestClient = { + val sparkRestClient = Mockito.mock(classOf[SparkRestClient]) + Mockito.when(sparkRestClient.fetchData(appId)).thenReturn(restDerivedData) + sparkRestClient + } + + def newFakeSparkLogClient( + appId: String, + attemptId: Option[String], + logDerivedData: Future[SparkLogDerivedData] + )( + implicit ec: ExecutionContext + ): SparkLogClient = { + val sparkLogClient = Mockito.mock(classOf[SparkLogClient]) + Mockito.when(sparkLogClient.fetchData(appId, attemptId)).thenReturn(logDerivedData) + sparkLogClient + } + + def managedCopyInputStreamToOutputStream(in: => InputStream, out: => OutputStream): Unit = { + for { + input <- resource.managed(in) + output <- resource.managed(out) + } { + val buffer = new Array[Byte](512) + def read(): Unit = input.read(buffer) match { + case -1 => () + case bytesRead => { + output.write(buffer, 0, bytesRead) 
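+ // keep copying until read() returns -1 (end of stream)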
+ read() + } + } + read() + } + } +} diff --git a/test/com/linkedin/drelephant/spark/fetchers/SparkLogClientTest.scala b/test/com/linkedin/drelephant/spark/fetchers/SparkLogClientTest.scala new file mode 100644 index 000000000..d5fd38927 --- /dev/null +++ b/test/com/linkedin/drelephant/spark/fetchers/SparkLogClientTest.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.fetchers + +import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream} +import java.net.URI + +import scala.concurrent.ExecutionContext + +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path, PositionedReadable} +import org.apache.hadoop.io.compress.CompressionInputStream +import org.apache.spark.SparkConf +import org.mockito.BDDMockito +import org.scalatest.{AsyncFunSpec, Matchers} +import org.scalatest.mockito.MockitoSugar +import org.xerial.snappy.SnappyOutputStream + +class SparkLogClientTest extends AsyncFunSpec with Matchers with MockitoSugar { + import SparkLogClientTest._ + + describe("SparkLogClient") { + it("throws an exception if spark.eventLog.dir is missing") { + an[IllegalArgumentException] should be thrownBy { new SparkLogClient(new Configuration(), new SparkConf()) } + } + + it("uses spark.eventLog.dir if it is already a webhdfs URI") { + val hadoopConfiguration = new Configuration() + val sparkConf = new SparkConf().set("spark.eventLog.dir", "webhdfs://nn1.grid.example.com:50070/logs/spark") + val sparkLogClient = new SparkLogClient(hadoopConfiguration, sparkConf) + sparkLogClient.webhdfsEventLogUri should be(new URI("webhdfs://nn1.grid.example.com:50070/logs/spark")) + } + + it("uses a webhdfs URI constructed from spark.eventLog.dir and dfs.namenode.http-address if spark.eventLog.dir is an hdfs URI") { + val hadoopConfiguration = new Configuration() + hadoopConfiguration.set("dfs.namenode.http-address", "0.0.0.0:50070") + val sparkConf = new SparkConf().set("spark.eventLog.dir", "hdfs://nn1.grid.example.com:9000/logs/spark") + val sparkLogClient = new SparkLogClient(hadoopConfiguration, sparkConf) + sparkLogClient.webhdfsEventLogUri should be(new URI("webhdfs://nn1.grid.example.com:50070/logs/spark")) + } + + it("returns the desired data from the Spark event logs") { + import ExecutionContext.Implicits.global + + val hadoopConfiguration = new Configuration() + hadoopConfiguration.set("dfs.namenode.http-address", "0.0.0.0:50070") + + val sparkConf = + new SparkConf() + .set("spark.eventLog.dir", "hdfs://nn1.grid.example.com:9000/logs/spark") + .set("spark.eventLog.compress", "true") + .set("spark.io.compression.codec", "snappy") + + val appId = "application_1" + val attemptId = Some("1") + + val testResourceIn = getClass.getClassLoader.getResourceAsStream("spark_event_logs/event_log_2") + val byteOut = new ByteArrayOutputStream() + val snappyOut = new SnappyOutputStream(byteOut) + 
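// compress the fixture event log with Snappy, matching spark.eventLog.compress=true and the .snappy path mocked below +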
managedCopyInputStreamToOutputStream(testResourceIn, snappyOut) + + val sparkLogClient = new SparkLogClient(hadoopConfiguration, sparkConf) { + override lazy val fs: FileSystem = { + val fs = mock[FileSystem] + val expectedPath = new Path("webhdfs://nn1.grid.example.com:50070/logs/spark/application_1_1.snappy") + BDDMockito.given(fs.exists(expectedPath)).willReturn(true) + BDDMockito.given(fs.open(expectedPath)).willReturn( + new FSDataInputStream(new FakeCompressionInputStream(new ByteArrayInputStream(byteOut.toByteArray))) + ) + fs + } + } + + sparkLogClient.fetchData(appId, attemptId).map { logDerivedData => + val expectedProperties = Map( + "spark.serializer" -> "org.apache.spark.serializer.KryoSerializer", + "spark.storage.memoryFraction" -> "0.3", + "spark.driver.memory" -> "2G", + "spark.executor.instances" -> "900", + "spark.executor.memory" -> "1g", + "spark.shuffle.memoryFraction" -> "0.5" + ) + val actualProperties = logDerivedData.appConfigurationProperties + actualProperties should be(expectedProperties) + } + } + } +} + +object SparkLogClientTest { + class FakeCompressionInputStream(in: InputStream) extends CompressionInputStream(in) with PositionedReadable { + override def read(): Int = in.read() + override def read(b: Array[Byte], off: Int, len: Int): Int = in.read(b, off, len) + override def read(pos: Long, buffer: Array[Byte], off: Int, len: Int): Int = ??? + override def readFully(pos: Long, buffer: Array[Byte], off: Int, len: Int): Unit = ??? + override def readFully(pos: Long, buffer: Array[Byte]): Unit = ??? + override def resetState(): Unit = ??? + } + + def managedCopyInputStreamToOutputStream(in: => InputStream, out: => OutputStream): Unit = { + for { + input <- resource.managed(in) + output <- resource.managed(out) + } { + val buffer = new Array[Byte](512) + def read(): Unit = input.read(buffer) match { + case -1 => () + case bytesRead => { + output.write(buffer, 0, bytesRead) + read() + } + } + read() + } + } +} diff --git a/test/com/linkedin/drelephant/spark/fetchers/SparkRestClientTest.scala b/test/com/linkedin/drelephant/spark/fetchers/SparkRestClientTest.scala new file mode 100644 index 000000000..5c48e42aa --- /dev/null +++ b/test/com/linkedin/drelephant/spark/fetchers/SparkRestClientTest.scala @@ -0,0 +1,315 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.fetchers + +import java.net.URI +import java.text.SimpleDateFormat +import java.util.{Calendar, Date, SimpleTimeZone} + +import scala.concurrent.ExecutionContext +import scala.util.Try + +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfo, ApplicationInfo, ExecutorSummary, JobData, StageData} +import javax.ws.rs.{GET, Path, PathParam, Produces} +import javax.ws.rs.client.WebTarget +import javax.ws.rs.core.{Application, MediaType} +import javax.ws.rs.ext.ContextResolver +import org.apache.spark.SparkConf +import org.glassfish.jersey.client.ClientConfig +import org.glassfish.jersey.server.ResourceConfig +import org.glassfish.jersey.test.{JerseyTest, TestProperties} +import org.scalatest.{AsyncFunSpec, Matchers} +import org.scalatest.compatible.Assertion + +class SparkRestClientTest extends AsyncFunSpec with Matchers { + import SparkRestClientTest._ + + describe("SparkRestClient") { + it("throws an exception if spark.yarn.historyServer.address is missing") { + an[IllegalArgumentException] should be thrownBy(new SparkRestClient(new SparkConf())) + } + + it("returns the desired data from the Spark REST API for cluster mode application") { + import ExecutionContext.Implicits.global + val fakeJerseyServer = new FakeJerseyServer() { + override def configure(): Application = super.configure() match { + case resourceConfig: ResourceConfig => + resourceConfig + .register(classOf[FetchClusterModeDataFixtures.ApiResource]) + .register(classOf[FetchClusterModeDataFixtures.ApplicationResource]) + .register(classOf[FetchClusterModeDataFixtures.JobsResource]) + .register(classOf[FetchClusterModeDataFixtures.StagesResource]) + .register(classOf[FetchClusterModeDataFixtures.ExecutorsResource]) + case config => config + } + } + + fakeJerseyServer.setUp() + + val historyServerUri = fakeJerseyServer.target.getUri + + val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", s"${historyServerUri.getHost}:${historyServerUri.getPort}") + val sparkRestClient = new SparkRestClient(sparkConf) + + sparkRestClient.fetchData(FetchClusterModeDataFixtures.APP_ID) map { restDerivedData => + restDerivedData.applicationInfo.id should be(FetchClusterModeDataFixtures.APP_ID) + restDerivedData.applicationInfo.name should be(FetchClusterModeDataFixtures.APP_NAME) + restDerivedData.jobDatas should not be(None) + restDerivedData.stageDatas should not be(None) + restDerivedData.executorSummaries should not be(None) + } andThen { case assertion: Try[Assertion] => + fakeJerseyServer.tearDown() + assertion + } + } + + it("returns the desired data from the Spark REST API for client mode application") { + import ExecutionContext.Implicits.global + val fakeJerseyServer = new FakeJerseyServer() { + override def configure(): Application = super.configure() match { + case resourceConfig: ResourceConfig => + resourceConfig + .register(classOf[FetchClientModeDataFixtures.ApiResource]) + .register(classOf[FetchClientModeDataFixtures.ApplicationResource]) + .register(classOf[FetchClientModeDataFixtures.JobsResource]) + .register(classOf[FetchClientModeDataFixtures.StagesResource]) + .register(classOf[FetchClientModeDataFixtures.ExecutorsResource]) + case config => config + } + } + + fakeJerseyServer.setUp() + + val historyServerUri = fakeJerseyServer.target.getUri + + val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", 
s"${historyServerUri.getHost}:${historyServerUri.getPort}") + val sparkRestClient = new SparkRestClient(sparkConf) + + sparkRestClient.fetchData(FetchClusterModeDataFixtures.APP_ID) map { restDerivedData => + restDerivedData.applicationInfo.id should be(FetchClusterModeDataFixtures.APP_ID) + restDerivedData.applicationInfo.name should be(FetchClusterModeDataFixtures.APP_NAME) + restDerivedData.jobDatas should not be(None) + restDerivedData.stageDatas should not be(None) + restDerivedData.executorSummaries should not be(None) + } andThen { case assertion: Try[Assertion] => + fakeJerseyServer.tearDown() + assertion + } + } + + it("returns the desired data from the Spark REST API for cluster mode application when http in jobhistory address") { + import ExecutionContext.Implicits.global + val fakeJerseyServer = new FakeJerseyServer() { + override def configure(): Application = super.configure() match { + case resourceConfig: ResourceConfig => + resourceConfig + .register(classOf[FetchClusterModeDataFixtures.ApiResource]) + .register(classOf[FetchClusterModeDataFixtures.ApplicationResource]) + .register(classOf[FetchClusterModeDataFixtures.JobsResource]) + .register(classOf[FetchClusterModeDataFixtures.StagesResource]) + .register(classOf[FetchClusterModeDataFixtures.ExecutorsResource]) + case config => config + } + } + + fakeJerseyServer.setUp() + + val historyServerUri = fakeJerseyServer.target.getUri + + val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", s"http://${historyServerUri.getHost}:${historyServerUri.getPort}") + val sparkRestClient = new SparkRestClient(sparkConf) + + sparkRestClient.fetchData(FetchClusterModeDataFixtures.APP_ID) map { restDerivedData => + restDerivedData.applicationInfo.id should be(FetchClusterModeDataFixtures.APP_ID) + restDerivedData.applicationInfo.name should be(FetchClusterModeDataFixtures.APP_NAME) + restDerivedData.jobDatas should not be(None) + restDerivedData.stageDatas should not be(None) + restDerivedData.executorSummaries should not be(None) + } andThen { case assertion: Try[Assertion] => + fakeJerseyServer.tearDown() + assertion + } + } + } +} + +object SparkRestClientTest { + class FakeJerseyServer extends JerseyTest { + override def configure(): Application = { + forceSet(TestProperties.CONTAINER_PORT, "0") + enable(TestProperties.LOG_TRAFFIC) + enable(TestProperties.DUMP_ENTITY) + + new ResourceConfig() + .register(classOf[FakeJerseyObjectMapperProvider]) + } + + override def configureClient(clientConfig: ClientConfig): Unit = { + clientConfig.register(classOf[FakeJerseyObjectMapperProvider]) + } + } + + class FakeJerseyObjectMapperProvider extends ContextResolver[ObjectMapper] { + lazy val objectMapper = { + val objectMapper = new ObjectMapper() + objectMapper.registerModule(DefaultScalaModule) + objectMapper.setDateFormat(dateFormat) + objectMapper + } + + lazy val dateFormat = { + val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'") + val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) + iso8601.setCalendar(cal) + iso8601 + } + + override def getContext(cls: Class[_]): ObjectMapper = objectMapper + } + + object FetchClusterModeDataFixtures { + val APP_ID = "application_1" + val APP_NAME = "app" + + @Path("/api/v1") + class ApiResource { + @Path("applications/{appId}") + def getApplication(): ApplicationResource = new ApplicationResource() + + @Path("applications/{appId}/{attemptId}/jobs") + def getJobs(): JobsResource = new JobsResource() + + @Path("applications/{appId}/{attemptId}/stages") + def 
getStages(): StagesResource = new StagesResource() + + @Path("applications/{appId}/{attemptId}/executors") + def getExecutors(): ExecutorsResource = new ExecutorsResource() + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class ApplicationResource { + @GET + def getApplication(@PathParam("appId") appId: String): ApplicationInfo = { + val t2 = System.currentTimeMillis + val t1 = t2 - 1 + val duration = 8000000L + new ApplicationInfo( + APP_ID, + APP_NAME, + Seq( + newFakeApplicationAttemptInfo(Some("2"), startTime = new Date(t2 - duration), endTime = new Date(t2)), + newFakeApplicationAttemptInfo(Some("1"), startTime = new Date(t1 - duration), endTime = new Date(t1)) + ) + ) + } + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class JobsResource { + @GET + def getJobs(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Seq[JobData] = + if (attemptId == "2") Seq.empty else throw new Exception() + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class StagesResource { + @GET + def getStages(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Seq[StageData] = + if (attemptId == "2") Seq.empty else throw new Exception() + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class ExecutorsResource { + @GET + def getExecutors(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Seq[ExecutorSummary] = + if (attemptId == "2") Seq.empty else throw new Exception() + } + } + + object FetchClientModeDataFixtures { + val APP_ID = "application_1" + val APP_NAME = "app" + + @Path("/api/v1") + class ApiResource { + @Path("applications/{appId}") + def getApplication(): ApplicationResource = new ApplicationResource() + + @Path("applications/{appId}/jobs") + def getJobs(): JobsResource = new JobsResource() + + @Path("applications/{appId}/stages") + def getStages(): StagesResource = new StagesResource() + + @Path("applications/{appId}/executors") + def getExecutors(): ExecutorsResource = new ExecutorsResource() + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class ApplicationResource { + @GET + def getApplication(@PathParam("appId") appId: String): ApplicationInfo = { + val t2 = System.currentTimeMillis + val t1 = t2 - 1 + val duration = 8000000L + new ApplicationInfo( + APP_ID, + APP_NAME, + Seq( + newFakeApplicationAttemptInfo(None, startTime = new Date(t2 - duration), endTime = new Date(t2)), + newFakeApplicationAttemptInfo(None, startTime = new Date(t1 - duration), endTime = new Date(t1)) + ) + ) + } + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class JobsResource { + @GET + def getJobs(@PathParam("appId") appId: String): Seq[JobData] = + Seq.empty + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class StagesResource { + @GET + def getStages(@PathParam("appId") appId: String): Seq[StageData] = + Seq.empty + } + + @Produces(Array(MediaType.APPLICATION_JSON)) + class ExecutorsResource { + @GET + def getExecutors(@PathParam("appId") appId: String): Seq[ExecutorSummary] = + Seq.empty + } + } + + def newFakeApplicationAttemptInfo( + attemptId: Option[String], + startTime: Date, + endTime: Date + ): ApplicationAttemptInfo = new ApplicationAttemptInfo( + attemptId, + startTime, + endTime, + sparkUser = "foo", + completed = true + ) +} diff --git a/test/com/linkedin/drelephant/spark/heuristics/ConfigurationHeuristicTest.scala b/test/com/linkedin/drelephant/spark/heuristics/ConfigurationHeuristicTest.scala new file mode 100644 index 000000000..e10870883 --- /dev/null +++ 
b/test/com/linkedin/drelephant/spark/heuristics/ConfigurationHeuristicTest.scala @@ -0,0 +1,210 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.spark.heuristics + +import com.linkedin.drelephant.spark.data.SparkRestDerivedData +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfo, ApplicationInfo} +import scala.collection.JavaConverters + +import com.linkedin.drelephant.analysis.{ApplicationType, Severity} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData} +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.scalatest.{FunSpec, Matchers} +import java.util.Date + + +class ConfigurationHeuristicTest extends FunSpec with Matchers { + import ConfigurationHeuristicTest._ + + describe("ConfigurationHeuristic") { + val heuristicConfigurationData = newFakeHeuristicConfigurationData( + Map( + "serializer_if_non_null_recommendation" -> "org.apache.spark.serializer.KryoSerializer", + "shuffle_manager_if_non_null_recommendation" -> "sort" + ) + ) + + val configurationHeuristic = new ConfigurationHeuristic(heuristicConfigurationData) + + describe(".apply") { + val configurationProperties = Map( + "spark.serializer" -> "org.apache.spark.serializer.KryoSerializer", + "spark.storage.memoryFraction" -> "0.3", + "spark.driver.memory" -> "2G", + "spark.executor.instances" -> "900", + "spark.executor.memory" -> "1g", + "spark.shuffle.memoryFraction" -> "0.5" + ) + + val data = newFakeSparkApplicationData(configurationProperties) + val heuristicResult = configurationHeuristic.apply(data) + val heuristicResultDetails = heuristicResult.getHeuristicResultDetails + + it("returns the severity") { + heuristicResult.getSeverity should be(Severity.NONE) + } + + it("returns the driver memory") { + val details = heuristicResultDetails.get(0) + details.getName should include("spark.driver.memory") + details.getValue should be("2 GB") + } + + it("returns the executor memory") { + val details = heuristicResultDetails.get(1) + details.getName should include("spark.executor.memory") + details.getValue should be("1 GB") + } + + it("returns the executor instances") { + val details = heuristicResultDetails.get(2) + details.getName should include("spark.executor.instances") + details.getValue should be("900") + } + + it("returns the executor cores") { + val details = heuristicResultDetails.get(3) + details.getName should include("spark.executor.cores") + details.getValue should include("default") + } + + it("returns the application duration") { + val details = heuristicResultDetails.get(4) + details.getName should include("spark.application.duration") + details.getValue should include("10") + } + + it("returns the serializer") { + val details = heuristicResultDetails.get(5) + details.getName should include("spark.serializer") + details.getValue should 
be("org.apache.spark.serializer.KryoSerializer") + } + } + + describe(".Evaluator") { + import ConfigurationHeuristic.Evaluator + + def newEvaluatorWithConfigurationProperties(configurationProperties: Map[String, String]): Evaluator = { + new Evaluator(configurationHeuristic, newFakeSparkApplicationData(configurationProperties)) + } + + it("has the driver memory bytes when they're present") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.driver.memory" -> "2G")) + evaluator.driverMemoryBytes should be(Some(2L * 1024 * 1024 * 1024)) + } + + it("has no driver memory bytes when they're absent") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.driverMemoryBytes should be(None) + } + + it("has the executor memory bytes when they're present") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.executor.memory" -> "1g")) + evaluator.executorMemoryBytes should be(Some(1L * 1024 * 1024 * 1024)) + } + + it("has no executor memory bytes when they're absent") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.executorMemoryBytes should be(None) + } + + it("has the executor instances when they're present") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.executor.instances" -> "900")) + evaluator.executorInstances should be(Some(900)) + } + + it("has no executor instances when they're absent") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.executorInstances should be(None) + } + + it("has the executor cores when they're present") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.executor.cores" -> "2")) + evaluator.executorCores should be(Some(2)) + } + + it("has no executor cores when they're absent") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.executorCores should be(None) + } + + it("has the serializer when it's present") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.serializer" -> "org.apache.spark.serializer.KryoSerializer")) + evaluator.serializer should be(Some("org.apache.spark.serializer.KryoSerializer")) + } + + it("has no serializer when it's absent") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.serializer should be(None) + } + + it("has the severity of the serializer setting when it matches our recommendation") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.serializer" -> "org.apache.spark.serializer.KryoSerializer")) + evaluator.serializerSeverity should be(Severity.NONE) + } + + it("has the severity of the serializer setting when it doesn't match our recommendation and is non-null") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.serializer" -> "org.apache.spark.serializer.FooSerializer")) + evaluator.serializerSeverity should be(Severity.MODERATE) + } + + it("has the severity of the serializer setting when it is null") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.serializerSeverity should be(Severity.NONE) + } + + it("computes the overall severity when there are some issues") { + val evaluator = newEvaluatorWithConfigurationProperties(Map("spark.serializer" -> "org.apache.spark.serializer.FooSerializer")) + evaluator.severity should be(Severity.MODERATE) + } + + it("computes the overall severity when there are no issues") { + val evaluator = newEvaluatorWithConfigurationProperties(Map.empty) + evaluator.severity should 
be(Severity.NONE) + } + } + } +} + +object ConfigurationHeuristicTest { + import JavaConverters._ + + def newFakeHeuristicConfigurationData(params: Map[String, String] = Map.empty): HeuristicConfigurationData = + new HeuristicConfigurationData("heuristic", "class", "view", new ApplicationType("type"), params.asJava) + + def newFakeSparkApplicationData(appConfigurationProperties: Map[String, String]): SparkApplicationData = { + val logDerivedData = SparkLogDerivedData( + SparkListenerEnvironmentUpdate(Map("Spark Properties" -> appConfigurationProperties.toSeq)) + ) + + val appId = "application_1" + val startDate = new Date() + val endDate = new Date(startDate.getTime() + 10000) + val applicationAttempt = new ApplicationAttemptInfo(Option("attempt1"), startDate, endDate, "sparkUser") + val applicationAttempts = Seq(applicationAttempt) + + val restDerivedData = SparkRestDerivedData( + new ApplicationInfo(appId, name = "app", applicationAttempts), + jobDatas = Seq.empty, + stageDatas = Seq.empty, + executorSummaries = Seq.empty + ) + + SparkApplicationData(appId, restDerivedData, Some(logDerivedData)) + } +} diff --git a/test/com/linkedin/drelephant/spark/heuristics/ExecutorsHeuristicTest.scala b/test/com/linkedin/drelephant/spark/heuristics/ExecutorsHeuristicTest.scala new file mode 100644 index 000000000..90e360caf --- /dev/null +++ b/test/com/linkedin/drelephant/spark/heuristics/ExecutorsHeuristicTest.scala @@ -0,0 +1,191 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.heuristics + +import scala.collection.JavaConverters + +import com.linkedin.drelephant.analysis.{ApplicationType, Severity, SeverityThresholds} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData} +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, ExecutorSummary} +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.scalatest.{FunSpec, Matchers} + + +class ExecutorsHeuristicTest extends FunSpec with Matchers { + import ExecutorsHeuristicTest._ + + describe("ExecutorsHeuristic") { + val heuristicConfigurationData = newFakeHeuristicConfigurationData( + Map( + "max_to_median_ratio_severity_thresholds" -> "1.414,2,4,16", + "ignore_max_bytes_less_than_threshold" -> "4000000", + "ignore_max_millis_less_than_threshold" -> "4000001" + ) + ) + val executorsHeuristic = new ExecutorsHeuristic(heuristicConfigurationData) + + val maxMemory = 5000000L + + val executorSummaries = Seq( + newFakeExecutorSummary( + id = "1", + memoryUsed = 1000000L, + totalDuration = 1000001L, + totalInputBytes = 1000002L, + totalShuffleRead = 1000003L, + totalShuffleWrite = 1000004L, + maxMemory + ), + newFakeExecutorSummary( + id = "2", + memoryUsed = 2000000L, + totalDuration = 2000001L, + totalInputBytes = 2000002L, + totalShuffleRead = 2000003L, + totalShuffleWrite = 2000004L, + maxMemory + ), + newFakeExecutorSummary( + id = "3", + memoryUsed = 3000000L, + totalDuration = 3000001L, + totalInputBytes = 3000002L, + totalShuffleRead = 3000003L, + totalShuffleWrite = 3000004L, + maxMemory + ), + newFakeExecutorSummary( + id = "4", + memoryUsed = 4000000L, + totalDuration = 4000001L, + totalInputBytes = 4000002L, + totalShuffleRead = 4000003L, + totalShuffleWrite = 4000004L, + maxMemory + ) + ) + + describe(".apply") { + val data = newFakeSparkApplicationData(executorSummaries) + val heuristicResult = executorsHeuristic.apply(data) + val heuristicResultDetails = heuristicResult.getHeuristicResultDetails + + it("returns the severity") { + heuristicResult.getSeverity should be(Severity.LOW) + } + + it("returns the total storage memory allocated") { + val details = heuristicResultDetails.get(0) + details.getName should include("storage memory allocated") + details.getValue should be("19.07 MB") + } + + it("returns the total storage memory used") { + val details = heuristicResultDetails.get(1) + details.getName should include("storage memory used") + details.getValue should be("9.54 MB") + } + + it("returns the storage memory utilization rate") { + val details = heuristicResultDetails.get(2) + details.getName should include("storage memory utilization rate") + details.getValue should be("0.500") + } + + it("returns the distribution of storage memory used among executors") { + val details = heuristicResultDetails.get(3) + details.getName should include("storage memory used") + details.getValue should include regex("976.56 KB.*976.56 KB.*2.38 MB.*2.86 MB.*3.81 MB") + } + + it("returns the distribution of task time among executors") { + val details = heuristicResultDetails.get(4) + details.getName should include("task time") + details.getValue should include regex("16 min 40 sec.*16 min 40 sec.*41 min 40 sec.*50 min.*1 hr 6 min 40 sec") + } + + it("returns the total sum of task time among executors") { + val details = heuristicResultDetails.get(5) + details.getName should include("task time sum") 
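+ // 1000001 + 2000001 + 3000001 + 4000001 ms of executor task time adds up to roughly 10000 seconds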
+ details.getValue should include regex("10000") + } + + it("returns the distribution of input bytes among executors") { + val details = heuristicResultDetails.get(6) + details.getName should include("input bytes") + details.getValue should include regex("976.56 KB.*976.56 KB.*2.38 MB.*2.86 MB.*3.81 MB") + } + + it("returns the distribution of shuffle read bytes among executors") { + val details = heuristicResultDetails.get(7) + details.getName should include("shuffle read bytes") + details.getValue should include regex("976.57 KB.*976.57 KB.*2.38 MB.*2.86 MB.*3.81 MB") + } + + it("returns the distribution of shuffle write bytes among executors") { + val details = heuristicResultDetails.get(8) + details.getName should include("shuffle write bytes") + details.getValue should include regex("976.57 KB.*976.57 KB.*2.38 MB.*2.86 MB.*3.81 MB") + } + } + + describe(".Evaluator") { + import ExecutorsHeuristic.Evaluator + import ExecutorsHeuristic.Distribution + + val data = newFakeSparkApplicationData(executorSummaries) + val evaluator = new Evaluator(executorsHeuristic, data) + + it("has the total storage memory allocated") { + evaluator.totalStorageMemoryAllocated should be(20000000L) + } + + it("has the total storage memory used") { + evaluator.totalStorageMemoryUsed should be(10000000L) + } + + it("has the storage memory utilization rate") { + evaluator.storageMemoryUtilizationRate should be(0.5D) + } + + it("has the distribution of storage memory used among executors") { + evaluator.storageMemoryUsedDistribution should be( + Distribution(1000000L, 1000000L, 2500000L, 3000000L, 4000000L) + ) + } + + it("has the distribution of task time among executors") { + evaluator.taskTimeDistribution should be( + Distribution(1000001L, 1000001L, 2500001L, 3000001L, 4000001L) + ) + } + + it("has the distribution of input bytes among executors") { + evaluator.inputBytesDistribution should be( + Distribution(1000002L, 1000002L, 2500002L, 3000002L, 4000002L) + ) + } + + it("has the distribution of shuffle read among executors") { + evaluator.shuffleReadBytesDistribution should be( + Distribution(1000003L, 1000003L, 2500003L, 3000003L, 4000003L) + ) + } + + it("has the distribution of shuffle write among executors") { + evaluator.shuffleWriteBytesDistribution should be( + Distribution(1000004L, 1000004L, 2500004L, 3000004L, 4000004L) + ) + } + + it("computes the overall severity") { + evaluator.severity should be(Severity.LOW) + } + + it("computes the severity of a given distribution, when the max is large enough") { + val distribution = Distribution(min = 0L, p25 = 1000L, median = 1000L, p75 = 1000L, max = 16000L) + evaluator.severityOfDistribution(distribution, ignoreMaxLessThanThreshold = 16000L) should be(Severity.CRITICAL) + } + + it("computes the severity of a given distribution, when the max is not large enough") { + val distribution = Distribution(min = 0L, p25 = 1000L, median = 1000L, p75 = 1000L, max = 16000L) + evaluator.severityOfDistribution(distribution, ignoreMaxLessThanThreshold = 16001L) should be(Severity.NONE) + } + + it("computes the severity of a given distribution, when the median is zero and the max is large enough") { + val distribution = Distribution(min = 0L, p25 = 0L, median = 0L, p75 = 0L, max = 16000L) + evaluator.severityOfDistribution(distribution, ignoreMaxLessThanThreshold = 16000L) should be(Severity.CRITICAL) + } + + it("computes the severity of a given distribution, when the median is zero and the max is not large enough") { + val distribution = Distribution(min = 0L, p25 = 
0L, median = 0L, p75 = 0L, max = 16000L) + evaluator.severityOfDistribution(distribution, ignoreMaxLessThanThreshold = 16001L) should be(Severity.NONE) + } + } + } +} + +object ExecutorsHeuristicTest { + import JavaConverters._ + + def newFakeHeuristicConfigurationData(params: Map[String, String] = Map.empty): HeuristicConfigurationData = + new HeuristicConfigurationData("heuristic", "class", "view", new ApplicationType("type"), params.asJava) + + def newFakeExecutorSummary( + id: String, + memoryUsed: Long, + totalDuration: Long, + totalInputBytes: Long, + totalShuffleRead: Long, + totalShuffleWrite: Long, + maxMemory: Long + ): ExecutorSummary = new ExecutorSummary( + id, + hostPort = "", + rddBlocks = 0, + memoryUsed, + diskUsed = 0, + activeTasks = 0, + failedTasks = 0, + completedTasks = 0, + totalTasks = 0, + totalDuration, + totalInputBytes, + totalShuffleRead, + totalShuffleWrite, + maxMemory, + executorLogs = Map.empty + ) + + def newFakeSparkApplicationData(executorSummaries: Seq[ExecutorSummary]): SparkApplicationData = { + val appId = "application_1" + + val restDerivedData = SparkRestDerivedData( + new ApplicationInfo(appId, name = "app", Seq.empty), + jobDatas = Seq.empty, + stageDatas = Seq.empty, + executorSummaries + ) + + SparkApplicationData(appId, restDerivedData, logDerivedData = None) + } +} diff --git a/test/com/linkedin/drelephant/spark/heuristics/JobsHeuristicTest.scala b/test/com/linkedin/drelephant/spark/heuristics/JobsHeuristicTest.scala new file mode 100644 index 000000000..2a992576f --- /dev/null +++ b/test/com/linkedin/drelephant/spark/heuristics/JobsHeuristicTest.scala @@ -0,0 +1,165 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.heuristics + +import scala.collection.JavaConverters + +import com.linkedin.drelephant.analysis.{ApplicationType, Severity} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData} +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, JobData} +import org.apache.spark.JobExecutionStatus +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.scalatest.{FunSpec, Matchers} + + +class JobsHeuristicTest extends FunSpec with Matchers { + import JobsHeuristicTest._ + + describe("JobsHeuristic") { + val heuristicConfigurationData = newFakeHeuristicConfigurationData( + Map( + "job_failure_rate_severity_thresholds" -> "0.2,0.4,0.6,0.8", + "job_task_failure_rate_severity_thresholds" -> "0.2,0.4,0.6,0.8" + ) + ) + val jobsHeuristic = new JobsHeuristic(heuristicConfigurationData) + val jobDatas = Seq( + newFakeJobData(0, "foo", JobExecutionStatus.SUCCEEDED, numCompleteTasks = 10, numFailedTasks = 0), + newFakeJobData(1, "bar", JobExecutionStatus.SUCCEEDED, numCompleteTasks = 8, numFailedTasks = 2), + newFakeJobData(2, "baz", JobExecutionStatus.SUCCEEDED, numCompleteTasks = 6, numFailedTasks = 4), + newFakeJobData(3, "aaa", JobExecutionStatus.FAILED, numCompleteTasks = 4, numFailedTasks = 6), + newFakeJobData(4, "zzz", JobExecutionStatus.FAILED, numCompleteTasks = 2, numFailedTasks = 8) + ) + + describe(".apply") { + val data = newFakeSparkApplicationData(jobDatas) + val heuristicResult = jobsHeuristic.apply(data) + val heuristicResultDetails = heuristicResult.getHeuristicResultDetails + + it("returns the severity") { + heuristicResult.getSeverity should be(Severity.CRITICAL) + } + + it("returns the number of completed jobs") { + heuristicResultDetails.get(0).getValue should be("3") + } + + it("returns the number of failed jobs") { + heuristicResultDetails.get(1).getValue should be("2") + } + + it("returns the list of failed jobs") { + heuristicResultDetails.get(2).getValue should be( + s"""|job 3, aaa + |job 4, zzz""".stripMargin + ) + } + + it("returns the job failure rate") { + heuristicResultDetails.get(3).getValue should be("0.400") + } + + it("returns the list of jobs with high task failure rates") { + heuristicResultDetails.get(4).getValue should be( + s"""|job 3, aaa (task failure rate: 0.600) + |job 4, zzz (task failure rate: 0.800)""".stripMargin + ) + } + } + + describe(".Evaluator") { + import JobsHeuristic.Evaluator + + val data = newFakeSparkApplicationData(jobDatas) + val evaluator = new Evaluator(jobsHeuristic, data) + + it("has the number of completed jobs") { + evaluator.numCompletedJobs should be(3) + } + + it("has the number of failed jobs") { + evaluator.numFailedJobs should be(2) + } + + it("has the list of failed jobs") { + val jobIds = evaluator.failedJobs.map { _.jobId } + jobIds should contain theSameElementsInOrderAs(Seq(3, 4)) + } + + it("has the job failure rate") { + evaluator.jobFailureRate should be(Some(0.4D)) + } + + it("has the list of jobs with high task failure rates") { + val jobIdsAndTaskFailureRates = + evaluator.jobsWithHighTaskFailureRates.map { case (jobData, taskFailureRate) => (jobData.jobId, taskFailureRate) } + jobIdsAndTaskFailureRates should contain theSameElementsInOrderAs(Seq((3, 0.6D), (4, 0.8D))) + } + + it("computes the overall severity") { + evaluator.severity should be(Severity.CRITICAL) + } + } + } +} + +object 
JobsHeuristicTest { + import JavaConverters._ + + def newFakeHeuristicConfigurationData(params: Map[String, String] = Map.empty): HeuristicConfigurationData = + new HeuristicConfigurationData("heuristic", "class", "view", new ApplicationType("type"), params.asJava) + + def newFakeJobData( + jobId: Int, + name: String, + status: JobExecutionStatus, + numCompleteTasks: Int, + numFailedTasks: Int + ): JobData = new JobData( + jobId, + name, + description = None, + submissionTime = None, + completionTime = None, + stageIds = Seq.empty, + jobGroup = None, + status, + numTasks = numCompleteTasks + numFailedTasks, + numActiveTasks = 0, + numCompleteTasks, + numSkippedTasks = 0, + numFailedTasks, + numActiveStages = 0, + numCompletedStages = 0, + numSkippedStages = 0, + numFailedStages = 0 + ) + + def newFakeSparkApplicationData(jobDatas: Seq[JobData]): SparkApplicationData = { + val appId = "application_1" + + val restDerivedData = SparkRestDerivedData( + new ApplicationInfo(appId, name = "app", Seq.empty), + jobDatas, + stageDatas = Seq.empty, + executorSummaries = Seq.empty + ) + + SparkApplicationData(appId, restDerivedData, logDerivedData = None) + } +} diff --git a/test/com/linkedin/drelephant/spark/heuristics/StagesHeuristicTest.scala b/test/com/linkedin/drelephant/spark/heuristics/StagesHeuristicTest.scala new file mode 100644 index 000000000..6794df530 --- /dev/null +++ b/test/com/linkedin/drelephant/spark/heuristics/StagesHeuristicTest.scala @@ -0,0 +1,191 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.spark.heuristics + +import scala.collection.JavaConverters +import scala.concurrent.duration.Duration + +import com.linkedin.drelephant.analysis.{ApplicationType, Severity} +import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData +import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData} +import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, JobData, StageData} +import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate +import org.apache.spark.status.api.v1.StageStatus +import org.scalatest.{FunSpec, Matchers} + + +class StagesHeuristicTest extends FunSpec with Matchers { + import StagesHeuristicTest._ + + describe("StagesHeuristic") { + val heuristicConfigurationData = newFakeHeuristicConfigurationData( + Map( + "stage_failure_rate_severity_thresholds" -> "0.2,0.4,0.6,0.8", + "stage_task_failure_rate_severity_thresholds" -> "0.2,0.4,0.6,0.8", + "stage_runtime_minutes_severity_thresholds" -> "15,30,45,60" + ) + ) + val stagesHeuristic = new StagesHeuristic(heuristicConfigurationData) + val stageDatas = Seq( + newFakeStageData(StageStatus.COMPLETE, 0, numCompleteTasks = 10, numFailedTasks = 0, executorRunTime = Duration("2min").toMillis, "foo"), + newFakeStageData(StageStatus.COMPLETE, 1, numCompleteTasks = 8, numFailedTasks = 2, executorRunTime = Duration("2min").toMillis, "bar"), + newFakeStageData(StageStatus.COMPLETE, 2, numCompleteTasks = 6, numFailedTasks = 4, executorRunTime = Duration("2min").toMillis, "baz"), + newFakeStageData(StageStatus.FAILED, 3, numCompleteTasks = 4, numFailedTasks = 6, executorRunTime = Duration("2min").toMillis, "aaa"), + newFakeStageData(StageStatus.FAILED, 4, numCompleteTasks = 2, numFailedTasks = 8, executorRunTime = Duration("2min").toMillis, "zzz"), + newFakeStageData(StageStatus.COMPLETE, 5, numCompleteTasks = 10, numFailedTasks = 0, executorRunTime = Duration("0min").toMillis, "bbb"), + newFakeStageData(StageStatus.COMPLETE, 6, numCompleteTasks = 10, numFailedTasks = 0, executorRunTime = Duration("30min").toMillis, "ccc"), + newFakeStageData(StageStatus.COMPLETE, 7, numCompleteTasks = 10, numFailedTasks = 0, executorRunTime = Duration("60min").toMillis, "ddd"), + newFakeStageData(StageStatus.COMPLETE, 8, numCompleteTasks = 10, numFailedTasks = 0, executorRunTime = Duration("90min").toMillis, "eee"), + newFakeStageData(StageStatus.COMPLETE, 9, numCompleteTasks = 10, numFailedTasks = 0, executorRunTime = Duration("120min").toMillis, "fff") + ) + + val appConfigurationProperties = Map("spark.executor.instances" -> "2") + + describe(".apply") { + val data = newFakeSparkApplicationData(stageDatas, appConfigurationProperties) + val heuristicResult = stagesHeuristic.apply(data) + val heuristicResultDetails = heuristicResult.getHeuristicResultDetails + + it("returns the severity") { + heuristicResult.getSeverity should be(Severity.CRITICAL) + } + + it("returns the number of completed stages") { + heuristicResultDetails.get(0).getValue should be("8") + } + + it("returns the number of failed stages") { + heuristicResultDetails.get(1).getValue should be("2") + } + + it("returns the stage failure rate") { + heuristicResultDetails.get(2).getValue should be("0.200") + } + + it("returns the list of stages with high task failure rates") { + heuristicResultDetails.get(3).getValue should be( + s"""|stage 3, attempt 0 (task failure rate: 0.600) + |stage 4, attempt 0 (task failure rate: 0.800)""".stripMargin + ) + } + + 
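// With spark.executor.instances = 2, the average executor runtime is each stage's total executor runtime split across the two executors: stage 8 averages 45 min and stage 9 averages 60 min, which is why only those two stages appear in the details below. + 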
it("returns the list of stages with long runtimes") { + heuristicResultDetails.get(4).getValue should be( + s"""|stage 8, attempt 0 (runtime: 45 min) + |stage 9, attempt 0 (runtime: 1 hr)""".stripMargin + ) + } + } + + describe(".Evaluator") { + import StagesHeuristic.Evaluator + + val data = newFakeSparkApplicationData(stageDatas, appConfigurationProperties) + val evaluator = new Evaluator(stagesHeuristic, data) + + it("has the number of completed stages") { + evaluator.numCompletedStages should be(8) + } + + it("has the number of failed stages") { + evaluator.numFailedStages should be(2) + } + + it("has the stage failure rate") { + evaluator.stageFailureRate should be(Some(0.2D)) + } + + it("has the list of stages with high task failure rates") { + val stageIdsAndTaskFailureRates = + evaluator.stagesWithHighTaskFailureRates.map { case (stageData, taskFailureRate) => (stageData.stageId, taskFailureRate) } + stageIdsAndTaskFailureRates should contain theSameElementsInOrderAs(Seq((3, 0.6D), (4, 0.8D))) + } + + it("has the list of stages with long average executor runtimes") { + val stageIdsAndRuntimes = + evaluator.stagesWithLongAverageExecutorRuntimes.map { case (stageData, runtime) => (stageData.stageId, runtime) } + stageIdsAndRuntimes should contain theSameElementsInOrderAs( + Seq((8, Duration("45min").toMillis), (9, Duration("60min").toMillis)) + ) + } + + it("computes the overall severity") { + evaluator.severity should be(Severity.CRITICAL) + } + } + } +} + +object StagesHeuristicTest { + import JavaConverters._ + + def newFakeHeuristicConfigurationData(params: Map[String, String] = Map.empty): HeuristicConfigurationData = + new HeuristicConfigurationData("heuristic", "class", "view", new ApplicationType("type"), params.asJava) + + def newFakeStageData( + status: StageStatus, + stageId: Int, + numCompleteTasks: Int, + numFailedTasks: Int, + executorRunTime: Long, + name: String + ): StageData = new StageData( + status, + stageId, + attemptId = 0, + numActiveTasks = numCompleteTasks + numFailedTasks, + numCompleteTasks, + numFailedTasks, + executorRunTime, + inputBytes = 0, + inputRecords = 0, + outputBytes = 0, + outputRecords = 0, + shuffleReadBytes = 0, + shuffleReadRecords = 0, + shuffleWriteBytes = 0, + shuffleWriteRecords = 0, + memoryBytesSpilled = 0, + diskBytesSpilled = 0, + name, + details = "", + schedulingPool = "", + accumulatorUpdates = Seq.empty, + tasks = None, + executorSummary = None + ) + + def newFakeSparkApplicationData( + stageDatas: Seq[StageData], + appConfigurationProperties: Map[String, String] + ): SparkApplicationData = { + val appId = "application_1" + + val restDerivedData = SparkRestDerivedData( + new ApplicationInfo(appId, name = "app", Seq.empty), + jobDatas = Seq.empty, + stageDatas, + executorSummaries = Seq.empty + ) + + val logDerivedData = SparkLogDerivedData( + SparkListenerEnvironmentUpdate(Map("Spark Properties" -> appConfigurationProperties.toSeq)) + ) + + SparkApplicationData(appId, restDerivedData, Some(logDerivedData)) + } +} diff --git a/test/com/linkedin/drelephant/util/InfoExtractorTest.java b/test/com/linkedin/drelephant/util/InfoExtractorTest.java new file mode 100644 index 000000000..b55293030 --- /dev/null +++ b/test/com/linkedin/drelephant/util/InfoExtractorTest.java @@ -0,0 +1,309 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package com.linkedin.drelephant.util; + +import com.linkedin.drelephant.analysis.ApplicationType; +import com.linkedin.drelephant.analysis.HadoopApplicationData; +import com.linkedin.drelephant.configurations.scheduler.SchedulerConfigurationData; +import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData; +import com.linkedin.drelephant.schedulers.AirflowScheduler; +import com.linkedin.drelephant.schedulers.AzkabanScheduler; +import com.linkedin.drelephant.schedulers.OozieScheduler; +import com.linkedin.drelephant.schedulers.Scheduler; + +import com.linkedin.drelephant.spark.data.SparkApplicationData; +import com.linkedin.drelephant.spark.fetchers.statusapiv1.ApplicationAttemptInfo; +import com.linkedin.drelephant.spark.fetchers.statusapiv1.ApplicationInfo; +import com.linkedin.drelephant.spark.fetchers.statusapiv1.ExecutorSummary; +import com.linkedin.drelephant.spark.fetchers.statusapiv1.JobData; +import com.linkedin.drelephant.spark.fetchers.statusapiv1.StageData; +import java.util.ArrayList; +import java.util.Properties; +import models.AppResult; + +import org.apache.log4j.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; + +import mockit.Expectations; +import mockit.Mocked; +import mockit.integration.junit4.JMockit; +import org.apache.commons.lang.StringUtils; +import org.apache.oozie.client.OozieClient; +import org.apache.oozie.client.WorkflowJob; + +import play.test.FakeApplication; +import play.test.Helpers; + +import scala.Tuple2; +import scala.collection.immutable.Map; +import scala.collection.immutable.HashMap; +import scala.collection.immutable.Vector; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + + +@RunWith(JMockit.class) +public class InfoExtractorTest { + @Mocked + OozieClient oozieClient; + + @Mocked + WorkflowJob workflowJob; + + @Mocked + WorkflowJob parentWorkflowJob; + + private FakeApplication app; + + @Before + public void startApp() throws Exception { + app = Helpers.fakeApplication(Helpers.inMemoryDatabase()); + Helpers.start(app); + } + + @After + public void stopApp() throws Exception { + Helpers.stop(app); + } + + @Test + public void testGetSchedulerInstanceAzkaban() { + Properties properties = new Properties(); + properties.put(AzkabanScheduler.AZKABAN_WORKFLOW_URL, "azkaban_workflow_url"); + properties.put(AzkabanScheduler.AZKABAN_JOB_URL, "azkaba_job_url"); + properties.put(AzkabanScheduler.AZKABAN_EXECUTION_URL, "azkaban_execution_url"); + properties.put(AzkabanScheduler.AZKABAN_ATTEMPT_URL, "azkaba_attempt_url"); + properties.put(AzkabanScheduler.AZKABAN_JOB_NAME, "azkaba_job_name"); + + Scheduler scheduler = InfoExtractor.getSchedulerInstance("id", properties); + assertEquals(true, scheduler instanceof AzkabanScheduler); + assertEquals("azkaban_workflow_url", scheduler.getFlowDefId()); + assertEquals("azkaba_job_url", scheduler.getJobDefId()); + assertEquals("azkaban_execution_url", scheduler.getFlowExecId()); + 
assertEquals("azkaba_attempt_url", scheduler.getJobExecId()); + assertEquals("azkaba_job_name", scheduler.getJobName()); + assertEquals("azkaban", scheduler.getSchedulerName()); + } + + @Test + public void testGetSchedulerInstanceAirflow() { + Properties properties = new Properties(); + properties.put(AirflowScheduler.AIRFLOW_DAG_ID, "airflow_dag_id"); + properties.put(AirflowScheduler.AIRFLOW_DAG_RUN_EXECUTION_DATE, "airflow_dag_run_execution_date"); + properties.put(AirflowScheduler.AIRFLOW_TASK_ID, "airflow_task_id"); + properties.put(AirflowScheduler.AIRFLOW_TASK_INSTANCE_EXECUTION_DATE, "airflow_task_instance_execution_date"); + + Scheduler scheduler = InfoExtractor.getSchedulerInstance("id", properties); + assertEquals(true, scheduler instanceof AirflowScheduler); + assertEquals("airflow_dag_id", scheduler.getFlowDefId()); + assertEquals("airflow_dag_id/airflow_dag_run_execution_date", scheduler.getFlowExecId()); + assertEquals("airflow_dag_id/airflow_task_id", scheduler.getJobDefId()); + assertEquals("airflow_dag_id/airflow_dag_run_execution_date/airflow_task_id/airflow_task_instance_execution_date", scheduler.getJobExecId()); + assertEquals("airflow_task_id", scheduler.getJobName()); + assertEquals("airflow", scheduler.getSchedulerName()); + } + + @Test + public void testGetSchedulerInstanceOozie() throws Exception { + final String jobInfo = "0004167-160629080632562-oozie-oozi-W"; + final String jobParentInfo = "0004166-160629080632562-oozie-oozi-W"; + Properties properties = new Properties(); + properties.put("oozie.action.id", jobInfo + "@some-action"); + properties.put("oozie.job.id", jobInfo); + + new Expectations() {{ + workflowJob.getId(); + result = jobInfo; + + workflowJob.getParentId(); + result = jobParentInfo; + + oozieClient.getJobInfo(jobInfo); + result = workflowJob; + + parentWorkflowJob.getId(); + result = jobParentInfo; + + parentWorkflowJob.getParentId(); + result = null; + + oozieClient.getJobInfo(jobParentInfo); + result = parentWorkflowJob; + }}; + + Scheduler scheduler = InfoExtractor.getSchedulerInstance("id", properties); + assertEquals(true, scheduler instanceof OozieScheduler); + assertEquals("oozie", scheduler.getSchedulerName()); + assertEquals(jobParentInfo, scheduler.getFlowDefId()); + assertEquals(jobParentInfo, scheduler.getFlowExecId()); + assertEquals(jobInfo, scheduler.getJobDefId()); + assertEquals(jobInfo, scheduler.getJobExecId()); + assertEquals(jobInfo, scheduler.getJobName()); + } + + @Test + public void testGetSchedulerInstanceNull() { + Properties properties = new Properties(); + + Scheduler scheduler = InfoExtractor.getSchedulerInstance("id", properties); + assertEquals(null, scheduler); + } + + @Test + public void testLoadSchedulerInfo() { + Properties properties = new Properties(); + properties.put(AzkabanScheduler.AZKABAN_JOB_URL, + "https://grid.example.com:9000/manager?project=project-name&flow=flow-name&job=job-name"); + properties.put(AzkabanScheduler.AZKABAN_ATTEMPT_URL, + "https://grid.example.com:9000/executor?execid=123456&job=job-name&attempt=0"); + properties.put(AzkabanScheduler.AZKABAN_WORKFLOW_URL, + "https://grid.example.com:9000/manager?project=project-name&flow=flow-name"); + properties.put(AzkabanScheduler.AZKABAN_EXECUTION_URL, + "https://grid.example.com:9000/executor?execid=123456"); + properties.put(AzkabanScheduler.AZKABAN_JOB_NAME, "job-name"); + + SchedulerConfigurationData schedulerConfigurationData = new SchedulerConfigurationData("azkaban", null, null); + + Scheduler scheduler = new AzkabanScheduler("id", 
properties, schedulerConfigurationData); + + AppResult result = new AppResult(); + + HadoopApplicationData data = + new HadoopApplicationData() { + String appId = "application_5678"; + Properties conf = new Properties(); + ApplicationType applicationType = new ApplicationType("foo"); + + @Override + public String getAppId() { + return appId; + } + + @Override + public Properties getConf() { + return conf; + } + + @Override + public ApplicationType getApplicationType() { + return applicationType; + } + + @Override + public boolean isEmpty() { + return false; + } + }; + + InfoExtractor.loadSchedulerInfo(result, data, scheduler); + + assertEquals(result.scheduler, "azkaban"); + assertFalse(StringUtils.isEmpty(result.getJobExecId())); + assertFalse(StringUtils.isEmpty(result.getJobDefId())); + assertFalse(StringUtils.isEmpty(result.getFlowExecId())); + assertFalse(StringUtils.isEmpty(result.getFlowDefId())); + assertFalse(StringUtils.isEmpty(result.getJobExecUrl())); + assertFalse(StringUtils.isEmpty(result.getJobDefUrl())); + assertFalse(StringUtils.isEmpty(result.getFlowExecUrl())); + assertFalse(StringUtils.isEmpty(result.getFlowDefUrl())); + } + + @Test + public void testLoadInfoMapReduce() { + final String JOB_DEF_URL = "https://grid.example.com:9000/manager?project=project-name&flow=flow-name&job=job-name"; + final String JOB_EXEC_URL = "https://grid.example.com:9000/executor?execid=123456&job=job-name&attempt=0"; + final String FLOW_DEF_URL = "https://grid.example.com:9000/manager?project=project-name&flow=flow-name"; + final String FLOW_EXEC_URL = "https://grid.example.com:9000/executor?execid=123456"; + final String JOB_NAME = "job-name"; + Properties properties = new Properties(); + properties.put(AzkabanScheduler.AZKABAN_JOB_URL, JOB_DEF_URL); + properties.put(AzkabanScheduler.AZKABAN_ATTEMPT_URL, JOB_EXEC_URL ); + properties.put(AzkabanScheduler.AZKABAN_WORKFLOW_URL, FLOW_DEF_URL); + properties.put(AzkabanScheduler.AZKABAN_EXECUTION_URL, FLOW_EXEC_URL); + properties.put(AzkabanScheduler.AZKABAN_JOB_NAME, JOB_NAME); + + AppResult result = new AppResult(); + + HadoopApplicationData data = + (new MapReduceApplicationData()).setAppId("application_5678").setJobConf(properties); + + InfoExtractor.loadInfo(result, data); + + assertTrue(result.jobDefId.equals(JOB_DEF_URL)); + assertTrue(result.jobExecId.equals(JOB_EXEC_URL)); + assertTrue(result.flowDefId.equals(FLOW_DEF_URL)); + assertTrue(result.flowExecId.equals(FLOW_EXEC_URL)); + } + + @Test + public void testLoadInfoSpark() { + final String JOB_DEF_URL = "https://grid.example.com:9000/manager?project=project-name&flow=flow-name&job=job-name"; + final String JOB_EXEC_URL = "https://grid.example.com:9000/executor?execid=123456&job=job-name&attempt=0"; + final String FLOW_DEF_URL = "https://grid.example.com:9000/manager?project=project-name&flow=flow-name"; + final String FLOW_EXEC_URL = "https://grid.example.com:9000/executor?execid=123456"; + final String JAVA_EXTRA_OPTIONS = "spark.driver.extraJavaOptions"; + Map properties = new HashMap(); + properties = properties.$plus(new Tuple2(JAVA_EXTRA_OPTIONS, "-Dazkaban.link.workflow.url=" + FLOW_DEF_URL + + " -Dazkaban.link.job.url=" + JOB_DEF_URL + + " -Dazkaban.link.execution.url=" + FLOW_EXEC_URL + + " -Dazkaban.link.attempt.url=" + JOB_EXEC_URL)); + + AppResult result = new AppResult(); + + HadoopApplicationData data = new SparkApplicationData("application_5678", + properties, + new ApplicationInfo("", "", new Vector(0,1,0)), + new Vector(0,1,0), + new Vector(0,1,0), + new 
Vector(0,1,0)); + + InfoExtractor.loadInfo(result, data); + + assertTrue(result.jobDefId.equals(JOB_DEF_URL)); + assertTrue(result.jobExecId.equals(JOB_EXEC_URL)); + assertTrue(result.flowDefId.equals(FLOW_DEF_URL)); + assertTrue(result.flowExecId.equals(FLOW_EXEC_URL)); + } + + @Test + public void testLoadInfoSparkNoConfig() { + Map properties = new HashMap(); + + AppResult result = new AppResult(); + + HadoopApplicationData data = new SparkApplicationData("application_5678", + properties, + new ApplicationInfo("", "", new Vector(0,1,0)), + new Vector(0,1,0), + new Vector(0,1,0), + new Vector(0,1,0)); + + // test to make sure loadInfo does not throw exception if properties are not defined + InfoExtractor.loadInfo(result, data); + + assertTrue(result.jobDefId.isEmpty()); + assertTrue(result.jobExecId.isEmpty()); + assertTrue(result.flowDefId.isEmpty()); + assertTrue(result.flowExecId.isEmpty()); + } +} diff --git a/test/com/linkedin/drelephant/util/MemoryFormatUtilsTest.java b/test/com/linkedin/drelephant/util/MemoryFormatUtilsTest.java new file mode 100644 index 000000000..0ae064ebc --- /dev/null +++ b/test/com/linkedin/drelephant/util/MemoryFormatUtilsTest.java @@ -0,0 +1,108 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.util; + +import java.util.Random; +import junit.framework.TestCase; + + +/** + * This class tests MemoryFormatUtils + */ +public class MemoryFormatUtilsTest extends TestCase { + public void testBytesToString() { + // Check integer values + assertEquals("0 B", MemoryFormatUtils.bytesToString(0L)); + assertEquals("1 B", MemoryFormatUtils.bytesToString(1L)); + assertEquals("1 KB", MemoryFormatUtils.bytesToString(1L << 10)); + assertEquals("1 MB", MemoryFormatUtils.bytesToString(1L << 20)); + assertEquals("1 GB", MemoryFormatUtils.bytesToString(1L << 30)); + assertEquals("1,000 GB", MemoryFormatUtils.bytesToString(1000L << 30)); + assertEquals("1 TB", MemoryFormatUtils.bytesToString(1L << 40)); + assertEquals("1,024 TB", MemoryFormatUtils.bytesToString(1L << 50)); + + // Check double values + assertEquals("8.79 KB", MemoryFormatUtils.bytesToString(9000L)); + assertEquals("8.79 MB", MemoryFormatUtils.bytesToString(9000L << 10)); + assertEquals("8.79 GB", MemoryFormatUtils.bytesToString(9000L << 20)); + assertEquals("8.79 TB", MemoryFormatUtils.bytesToString(9000L << 30)); + assertEquals("87,890.62 TB", MemoryFormatUtils.bytesToString((9000L * 10000) << 30)); + } + + public void testScientificStringNumberToBytes() { + long expectedBytes = (long) (8.79 * (1 << 20)); + assertEquals(expectedBytes, MemoryFormatUtils.stringToBytes("8.79e0 MB")); + assertEquals(expectedBytes, MemoryFormatUtils.stringToBytes("8.79E0 MB")); + assertEquals(expectedBytes, MemoryFormatUtils.stringToBytes("0.879e1 MB")); + assertEquals(expectedBytes, MemoryFormatUtils.stringToBytes("+0.879e+1 MB")); + assertEquals(expectedBytes, MemoryFormatUtils.stringToBytes("87.9e-1 MB")); + } + + public void testStringToBytes() { + // Null + assertEquals(0L, MemoryFormatUtils.stringToBytes(null)); + + // Integer tests + String[] units = new String[]{"", "K", "M", "G", "T"}; + long[] multipliers = new long[]{1L, 1L << 10, 1L << 20, 1L << 30, 1L << 40}; + Random random = new Random(System.currentTimeMillis()); + for (int i = 0; i < units.length; i++) { + String unit = units[i]; + + // Int values (for the largest unit, also cover a value above 1,024) + int[] integerNums = + unit.equals("T") ? 
new int[]{0, random.nextInt(1024), 2048} : new int[]{0, random.nextInt(1024)}; + for (int num : integerNums) { + long expectedNum = num * multipliers[i]; + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "b")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "b")); + + unit = unit.toLowerCase(); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "b")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "b")); + } + + // Double values, notice that converting double values to long might lose some precision + double[] doubleNums = new double[]{random.nextDouble(), random.nextDouble() + random.nextInt(1024), 8.79d}; + for (double num : doubleNums) { + long expectedNum = (long) (num * multipliers[i]); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "b")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "b")); + + unit = unit.toLowerCase(); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit)); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "b")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "B")); + assertEquals(expectedNum, MemoryFormatUtils.stringToBytes(num + " " + unit + "b")); + } + } + } +} diff --git a/test/com/linkedin/drelephant/util/UtilsTest.java b/test/com/linkedin/drelephant/util/UtilsTest.java new file mode 100644 index 000000000..69f5509c4 --- /dev/null +++ b/test/com/linkedin/drelephant/util/UtilsTest.java @@ -0,0 +1,226 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.linkedin.drelephant.util; + + +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + + +/** + * This class tests the Utils class + */ +public class UtilsTest { + + @Test + public void testParseJavaOptions() { + Map options1 = Utils.parseJavaOptions("-Dfoo=bar"); + assertEquals(1, options1.size()); + assertEquals("bar", options1.get("foo")); + + Map options2 = Utils.parseJavaOptions(" -Dfoo=bar -Dfoo2=bar2 -Dfoo3=bar3"); + assertEquals(3, options2.size()); + assertEquals("bar", options2.get("foo")); + assertEquals("bar2", options2.get("foo2")); + assertEquals("bar3", options2.get("foo3")); + } + + @Test + public void testParseJavaOptionsIgnoresNonStandardOptions() { + Map options1 = Utils.parseJavaOptions("-Dfoo=bar -XX:+UseCompressedOops -XX:MaxPermSize=512m -Dfoo2=bar2"); + assertEquals(2, options1.size()); + assertEquals("bar", options1.get("foo")); + assertEquals("bar2", options1.get("foo2")); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseJavaOptionsThrowsIllegalArgumentExceptionForMissingAssignment() { + Utils.parseJavaOptions("-Dfoo"); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseJavaOptionsThrowsIllegalArgumentExceptionForUnexpectedProperties() { + Utils.parseJavaOptions("-foo"); + } + + @Test + public void testGetParam() { + Map paramMap = new HashMap(); + paramMap.put("test_severity_1", "10, 50, 100, 200"); + paramMap.put("test_severity_2", "2, 4, 8"); + paramMap.put("test_param_1", "2&"); + paramMap.put("test_param_2", "2"); + paramMap.put("test_param_3", ""); + paramMap.put("test_param_4", null); + + double limits1[] = Utils.getParam(paramMap.get("test_severity_1"), 4); + assertEquals(10d, limits1[0], 0); + assertEquals(50d, limits1[1], 0); + assertEquals(100d, limits1[2], 0); + assertEquals(200d, limits1[3], 0); + + double limits2[] = Utils.getParam(paramMap.get("test_severity_2"), 4); + assertEquals(null, limits2); + + double limits3[] = Utils.getParam(paramMap.get("test_param_1"), 1); + assertEquals(null, limits3); + + double limits4[] = Utils.getParam(paramMap.get("test_param_2"), 1); + assertEquals(2d, limits4[0], 0); + + double limits5[] = Utils.getParam(paramMap.get("test_param_3"), 1); + assertEquals(null, limits5); + + double limits6[] = Utils.getParam(paramMap.get("test_param_4"), 1); + assertEquals(null, limits6); + } + + @Test + public void testCommaSeparated() { + String commaSeparated1 = Utils.commaSeparated("foo"); + assertEquals("foo", commaSeparated1); + + String commaSeparated2 = Utils.commaSeparated("foo", "bar", ""); + assertEquals("foo,bar", commaSeparated2); + + String commaSeparated3 = Utils.commaSeparated("foo", "bar", null); + assertEquals("foo,bar", commaSeparated3); + + String commaSeparated4 = Utils.commaSeparated(); + assertEquals("", commaSeparated4); + } + + @Test + public void testTruncateField() { + String truncatedField1 = Utils.truncateField("foo-bar", 7, "id"); + assertEquals("foo-bar", truncatedField1); + + String truncatedField2 = Utils.truncateField("foo-bar", 6, "id"); + assertEquals("foo...", truncatedField2); + + String truncatedField3 = Utils.truncateField("foo-bar", -1, "id"); + assertEquals("foo-bar", truncatedField3); + + String truncatedField4 = Utils.truncateField(null, 5, "id"); + assertEquals(null, truncatedField4); + } + + @Test + public void testParseCsKeyValue() { + Map properties = Utils.parseCsKeyValue(""); + 
assertEquals(0, properties.size()); + + Map properties1 = Utils.parseCsKeyValue("foo=bar"); + assertEquals(1, properties1.size()); + assertEquals("bar", properties1.get("foo")); + + Map properties2 = Utils.parseCsKeyValue("foo1=bar1,foo2=bar2,foo3=bar3"); + assertEquals(3, properties2.size()); + assertEquals("bar1", properties2.get("foo1")); + assertEquals("bar2", properties2.get("foo2")); + assertEquals("bar3", properties2.get("foo3")); + } + + @Test + public void testGetNonNegativeInt() { + Configuration conf = new Configuration(); + conf.set("foo1", "100"); + conf.set("foo2", "-100"); + conf.set("foo3", "0"); + conf.set("foo4", "0.5"); + conf.set("foo5", "9999999999999999"); + conf.set("foo6", "bar"); + + int defaultValue = 50; + assertEquals(100, Utils.getNonNegativeInt(conf, "foo1", defaultValue)); + assertEquals(0, Utils.getNonNegativeInt(conf, "foo2", defaultValue)); + assertEquals(0, Utils.getNonNegativeInt(conf, "foo3", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeInt(conf, "foo4", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeInt(conf, "foo5", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeInt(conf, "foo6", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeInt(conf, "foo7", defaultValue)); + } + + @Test + public void testGetNonNegativeLong() { + Configuration conf = new Configuration(); + + conf.set("foo1", "100"); + conf.set("foo2", "-100"); + conf.set("foo3", "0"); + conf.set("foo4", "0.5"); + conf.set("foo5", "9999999999999999"); + conf.set("foo6", "bar"); + + long defaultValue = 50; + assertEquals(100, Utils.getNonNegativeLong(conf, "foo1", defaultValue)); + assertEquals(0, Utils.getNonNegativeLong(conf, "foo2", defaultValue)); + assertEquals(0, Utils.getNonNegativeLong(conf, "foo3", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeLong(conf, "foo4", defaultValue)); + assertEquals(9999999999999999L, Utils.getNonNegativeLong(conf, "foo5", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeLong(conf, "foo6", defaultValue)); + assertEquals(defaultValue, Utils.getNonNegativeLong(conf, "foo7", defaultValue)); + } + + @Test + public void testFormatStringOrNull() { + assertEquals("Hello world!", Utils.formatStringOrNull("%s %s!", "Hello", "world")); + assertEquals(null, Utils.formatStringOrNull("%s %s!", "Hello", null)); + } + + @Test + public void testGetDurationBreakdown() { + long []durations = {13423,432344,23423562,23,324252,1132141414141L}; + assertEquals("0:00:13", Utils.getDurationBreakdown(durations[0])); + assertEquals("0:07:12", Utils.getDurationBreakdown(durations[1])); + assertEquals("6:30:23", Utils.getDurationBreakdown(durations[2])); + assertEquals("0:00:00", Utils.getDurationBreakdown(durations[3])); + assertEquals("0:05:24", Utils.getDurationBreakdown(durations[4])); + assertEquals("314483:43:34", Utils.getDurationBreakdown(durations[5])); + } + @Test + public void testGetPercentage() { + long []numerators = {10,20,30,40,50}; + long []denominators = {100,200,100,52,70}; + + assertEquals("10.00 %", Utils.getPercentage(numerators[0],denominators[0])); + assertEquals("10.00 %", Utils.getPercentage(numerators[1],denominators[1])); + assertEquals("30.00 %", Utils.getPercentage(numerators[2],denominators[2])); + assertEquals("76.92 %", Utils.getPercentage(numerators[3],denominators[3])); + assertEquals("71.43 %", Utils.getPercentage(numerators[4],denominators[4])); + assertEquals("NaN", Utils.getPercentage(0,0)); + } + + @Test + public void testGetDurationInGBHours() { + 
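+ // The inputs are MB-seconds; the expected strings below correspond to MB-seconds / 1024 / 3600, i.e. GB Hours rounded to three decimal places, with 213 MB-seconds rounding all the way down to "0 GB Hours".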
+ long []durations = {10000, 213234343, 23424, 635322, 213}; + + assertEquals("0.003 GB Hours", Utils.getResourceInGBHours(durations[0])); + assertEquals("57.844 GB Hours", Utils.getResourceInGBHours(durations[1])); + assertEquals("0.006 GB Hours", Utils.getResourceInGBHours(durations[2])); + assertEquals("0.172 GB Hours", Utils.getResourceInGBHours(durations[3])); + assertEquals("0 GB Hours", Utils.getResourceInGBHours(durations[4])); + + } + +} diff --git a/test/common/DBTestUtil.java b/test/common/DBTestUtil.java new file mode 100644 index 000000000..3be603b78 --- /dev/null +++ b/test/common/DBTestUtil.java @@ -0,0 +1,52 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package common; + +import java.io.FileInputStream; +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import org.apache.commons.io.IOUtils; +import play.db.DB; + +import static common.TestConstants.TEST_DATA_FILE; + + +public class DBTestUtil { + + public static void initDB() + throws IOException, SQLException { + String query = ""; + FileInputStream inputStream = new FileInputStream(TEST_DATA_FILE); + + try { + query = IOUtils.toString(inputStream); + } finally { + inputStream.close(); + } + + Connection connection = DB.getConnection(); + + try { + Statement statement = connection.createStatement(); + statement.execute(query); + } finally { + connection.close(); + } + } +} diff --git a/test/common/TestConstants.java b/test/common/TestConstants.java new file mode 100644 index 000000000..fbcf831b6 --- /dev/null +++ b/test/common/TestConstants.java @@ -0,0 +1,96 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package common; + +public class TestConstants { + + // Test data constants + public static final int TEST_SERVER_PORT = 9001; + public static final String BASE_URL = "http://localhost:" + TEST_SERVER_PORT; + public static final String TEST_DATA_FILE = "test/resources/test-init.sql"; + public static final int RESPONSE_TIMEOUT = 3000; // milliseconds + + public static final String TEST_JOB_ID1 = "application_1458194917883_1453361"; + public static final String TEST_JOB_ID2 = "application_1458194917883_1453362"; + public static final String TEST_JOB_NAME = "Email Overwriter"; + public static final String TEST_JOB_TYPE = "HadoopJava"; + public static final String TEST_APP_TYPE = "HadoopJava"; + public static final String TEST_USERNAME = "growth"; + public static final String TEST_DEFAULT_QUEUE_NAME = "default"; + + public static final String TEST_JOB_EXEC_ID1 = + "https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0"; + public static final String TEST_JOB_EXEC_ID2 = + "https://elephant.linkedin.com:8443/executor?execid=1654677&job=overwriter-reminder2&attempt=0"; + + public static final String TEST_FLOW_EXEC_ID1 = + "https://elephant.linkedin.com:8443/executor?execid=1654676"; + public static final String TEST_FLOW_EXEC_ID2 = + "https://elephant.linkedin.com:8443/executor?execid=1654677"; + public static final String TEST_FLOW_DEF_ID1 = + "https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder"; + public static final String TEST_JOB_DEF_ID1 = + "https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2"; + + public static final String TEST_START_TIME1 = "2016-04-18"; + public static final String TEST_END_TIME1 = "2016-04-19"; + // DB connection strings + public static final String DB_DEFAULT_DRIVER_KEY = "db.default.driver"; + public static final String DB_DEFAULT_DRIVER_VALUE = "org.h2.Driver"; + public static final String DB_DEFAULT_URL_KEY = "db.default.url"; + public static final String DB_DEFAULT_URL_VALUE = "jdbc:h2:mem:test;MODE=MySQL;"; + public static final String EVOLUTION_PLUGIN_KEY = "evolutionplugin"; + public static final String EVOLUTION_PLUGIN_VALUE = "enabled"; + public static final String APPLY_EVOLUTIONS_DEFAULT_KEY = "applyEvolutions.default"; + public static final String APPLY_EVOLUTIONS_DEFAULT_VALUE = "true"; + + // Paths to the rest end-points + public static final String REST_APP_RESULT_PATH = "/rest/job"; + public static final String REST_JOB_EXEC_RESULT_PATH = "/rest/jobexec"; + public static final String REST_FLOW_EXEC_RESULT_PATH = "/rest/flowexec"; + public static final String REST_SEARCH_PATH = "/rest/search"; + public static final String REST_COMPARE_PATH = "/rest/compare"; + public static final String REST_FLOW_GRAPH_DATA_PATH = "/rest/flowgraphdata"; + public static final String REST_JOB_GRAPH_DATA_PATH = "/rest/jobgraphdata"; + public static final String REST_JOB_METRICS_GRAPH_DATA_PATH = "/rest/jobmetricsgraphdata"; + public static final String REST_FLOW_METRICS_GRAPH_DATA_PATH = "/rest/flowmetricsgraphdata"; + public static final String REST_USER_RESOURCE_USAGE_PATH = "/rest/resourceusagedatabyuser"; + public static final String DEFAULT_ENCODING = "UTF-8"; + + //Paths to the UI rest end points + public static final String REST_WORKFLOW_SUMMARIES_PATH = "/rest/workflow-summaries"; + public static final String REST_JOB_SUMMARIES_PATH = "/rest/job-summaries"; + public static final String REST_APPLICATION_SUMMARIES_PATH 
= "/rest/application-summaries"; + public static final String REST_WORKFLOWS_PATH = "/rest/workflows"; + public static final String REST_JOBS_PATH = "/rest/jobs"; + public static final String REST_APPLICATIONS_PATH = "/rest/applications"; + public static final String REST_SEARCH_RESULTS = "/rest/search-results"; + + // Sample mapreduce constants + public static final String FILENAME_JOBCOUNTER = "mrdata/sampleJobCounter.properties"; + public static final String FILENAME_MAPPERTASK = "mrdata/mapperTaskCounter$.properties"; + public static final String FILENAME_REDUCERTASK = "mrdata/reducerTaskCounter$.properties"; + public static final String FILENAME_JOBCONF = "mrdata/sampleJobConf.properties"; + + public static final String TEST_TRACKING_URL = "http://hostname/jobhistory/job/job_1460381439677_0001"; + public static final String TEST_JOBCONF_NAME = "mapred.child.java.opts"; + public static final String TEST_JOBCONF_PATTERN = ".*."; + public static final String TEST_SCHEDULER = "azkaban"; + public static final long TEST_SCORE = 0; + public static final String TEST_SEVERITY = "NONE"; + +} diff --git a/test/common/TestUtil.java b/test/common/TestUtil.java new file mode 100644 index 000000000..a547c2a8c --- /dev/null +++ b/test/common/TestUtil.java @@ -0,0 +1,46 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package common; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +public class TestUtil { + + private static final Logger logger = LoggerFactory.getLogger(TestUtil.class); + + // private on purpose + private TestUtil() {} + + public static Properties loadProperties(String filePath) + throws IOException { + Properties properties = new Properties(); + InputStream inputStream = TestUtil.class.getClassLoader().getResourceAsStream(filePath); + if (inputStream == null) { + logger.info("Configuation file not present in classpath. File: " + filePath); + throw new RuntimeException("Unable to read " + filePath); + } + properties.load(inputStream); + logger.info("Configuation file loaded. File: " + filePath); + return properties; + } + +} diff --git a/test/controllers/ApplicationTest.java b/test/controllers/ApplicationTest.java new file mode 100644 index 000000000..1aae11388 --- /dev/null +++ b/test/controllers/ApplicationTest.java @@ -0,0 +1,144 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package controllers; + +import com.avaje.ebean.Query; +import models.AppResult; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import play.api.mvc.Content; +import play.test.FakeApplication; +import play.test.Helpers; +import views.html.page.homePage; +import views.html.results.searchResults; + +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +public class ApplicationTest { + + @Test + public void testRenderHomePage() { + Content html = homePage.render(5, 2, 3, searchResults.render("Latest analysis", null)); + assertEquals("text/html", html.contentType()); + assertTrue(html.body().contains("Hello there, I've been busy!")); + assertTrue(html.body().contains("I looked through 5 jobs today.")); + assertTrue(html.body().contains("About 2 of them could use some tuning.")); + assertTrue(html.body().contains("About 3 of them need some serious attention!")); + } + + @Test + public void testRenderSearch() { + Content html = searchResults.render("Latest analysis", null); + assertEquals("text/html", html.contentType()); + assertTrue(html.body().contains("Latest analysis")); + } + + public static FakeApplication app; + + @BeforeClass + public static void startApp() { + app = Helpers.fakeApplication(Helpers.inMemoryDatabase()); + Helpers.start(app); + } + + @AfterClass + public static void stopApp() { + Helpers.stop(app); + } + + @Test + public void testGenerateSearchQuery() { + + Map searchParams = new HashMap(); + + // Null searchParams Check + Query query1 = Application.generateSearchQuery("*", null); + assertNotNull(query1.findList()); + String sql1 = query1.getGeneratedSql(); + assertTrue(sql1.contains("select t0.id c0")); + assertTrue(sql1.contains("from yarn_app_result t0 order by t0.finish_time desc")); + + // No searchParams Check + Query query2 = Application.generateSearchQuery("*", searchParams); + assertNotNull(query2.findList()); + String sql2 = query2.getGeneratedSql(); + assertTrue(sql2.contains("select t0.id c0")); + assertTrue(sql2.contains("from yarn_app_result t0 order by t0.finish_time desc")); + + // Query by username + searchParams.put(Application.USERNAME, "username"); + query2 = Application.generateSearchQuery("*", searchParams); + assertNotNull(query2.findList()); + sql2 = query2.getGeneratedSql(); + assertTrue(sql2.contains("select t0.id c0")); + assertTrue(sql2.contains("from yarn_app_result t0 where")); + assertTrue(sql2.contains("t0.username = ? order by t0.finish_time desc")); + + // Query by queuename + searchParams.put(Application.QUEUE_NAME, "queueName"); + query2 = Application.generateSearchQuery("*", searchParams); + assertNotNull(query2.findList()); + sql2 = query2.getGeneratedSql(); + assertTrue(sql2.contains("select t0.id c0")); + assertTrue(sql2.contains("from yarn_app_result t0 where")); + assertTrue(sql2.contains("t0.queue_name = ? 
order by t0.finish_time desc")); + + + // Query by jobtype + searchParams.put(Application.JOB_TYPE, "Pig"); + query2 = Application.generateSearchQuery("*", searchParams); + assertNotNull(query2.findList()); + sql2 = query2.getGeneratedSql(); + assertTrue(sql2.contains("select t0.id c0")); + assertTrue(sql2.contains("from yarn_app_result t0 where")); + assertTrue(sql2.contains("t0.username = ?")); + assertTrue(sql2.contains("t0.job_type = ?")); + assertTrue(sql2.contains("order by t0.finish_time desc")); + + // Query by username, jobtype and start time + searchParams.put(Application.STARTED_TIME_BEGIN, "1459713751000"); + searchParams.put(Application.STARTED_TIME_END, "1459713751000"); + Query query3 = Application.generateSearchQuery("*", searchParams); + assertNotNull(query3.findList()); + String sql3 = query3.getGeneratedSql(); + assertTrue(sql3.contains("select t0.id c0")); + assertTrue(sql3.contains("from yarn_app_result t0 where")); + assertTrue(sql3.contains("t0.username = ?")); + assertTrue(sql3.contains("t0.start_time >= ?")); + assertTrue(sql3.contains("t0.start_time <= ?")); + assertTrue(sql3.contains("order by t0.start_time desc")); + + // Query by finish time + searchParams = new HashMap(); + searchParams.put(Application.FINISHED_TIME_BEGIN, "1459713751000"); + searchParams.put(Application.FINISHED_TIME_END, "1459713751000"); + Query query4 = Application.generateSearchQuery("*", searchParams); + assertNotNull(query4.findList()); + String sql4 = query4.getGeneratedSql(); + assertTrue(sql4.contains("select t0.id c0")); + assertTrue(sql4.contains("from yarn_app_result t0 where")); + assertTrue(sql4.contains("t0.finish_time >= ?")); + assertTrue(sql4.contains("t0.finish_time <= ?")); + assertTrue(sql4.contains("order by t0.finish_time desc")); + } +} diff --git a/test/job-history/get_jobhistory_hdfs.py b/test/job-history/get_jobhistory_hdfs.py new file mode 100644 index 000000000..349e01bd7 --- /dev/null +++ b/test/job-history/get_jobhistory_hdfs.py @@ -0,0 +1,795 @@ +#!/bin/env python2 +import argparse +import base64 +import lxml.etree as etree +import hashlib +import operator +import os +import json +import re +import shutil +import subprocess +import sys +import tempfile +import textwrap +import types + + +HDFS_CMD = "hdfs" +NUMBER_TYPES = (types.IntType, types.LongType, types.FloatType, types.ComplexType) +STRING_TYPES = (types.StringType, types.UnicodeType) + + +def _make_set(per_line_items): + """Make a set out of a string of arguments. + + @param str per_line_items: A newline separated string. 
+ @rtype: set[str] + """ + ret = set() + for item in per_line_items.split("\n"): + if item == "": + continue + ret.add(item) + return ret + + +SPARK_SAFE_PARENTS = _make_set(""" +App ID +Data Read Method +Event +Executor ID +Java Home +Java Version +Locality +Name +Reason +Result +Scala Version +Scope +Spark Version +Task Type +awt.toolkit +file.encoding +file.encoding.pkg +file.separator +java.awt.graphicsenv +java.awt.printerjob +java.class.version +java.endorsed.dirs +java.home +java.io.tmpdir +java.library.path +java.runtime.name +java.runtime.version +java.specification.name +java.specification.vendor +java.specification.version +java.vendor +java.vendor.url +java.vendor.url.bug +java.version +java.vm.info +java.vm.name +java.vm.specification.name +java.vm.specification.vendor +java.vm.specification.version +java.vm.vendor +java.vm.version +line.separator +os.arch +os.name +os.version +path.separator +spark.app.id +spark.driver.memory +spark.executor.id +spark.executor.memory +spark.extraListeners +spark.hadoop.fs.file.impl +spark.hadoop.fs.hdfs.impl +spark.master +spark.metrics.conf.*.sink.pp.class +spark.metrics.conf.*.source.jvm.class +spark.rdd.scope +spark.scheduler.mode +spark.submit.deployMode +spark.ui.filters +spark.yarn.dist.files +sun.boot.class.path +sun.boot.library.path +sun.cpu.endian +sun.cpu.isalist +sun.io.unicode.encoding +sun.java.launcher +sun.jnu.encoding +sun.management.compiler +sun.nio.ch.bugLevel +sun.os.patch.level +""") +SPARK_SAFE_VALUES = _make_set(""" +System Classpath +false +true +""") +MAPREDUCE_SAFE_PARENTS = _make_set(""" +taskid +state +containerId +attemptId +applicationAttemptId +string +splitLocations +jobid +""") +MAPREDUCE_SAFE_VALUES = _make_set(""" +AMStarted +AM_STARTED +BYTES_READ +BYTES_WRITTEN +CLEANUP_ATTEMPT_FAILED +CLEANUP_ATTEMPT_FINISHED +CLEANUP_ATTEMPT_KILLED +CLEANUP_ATTEMPT_STARTED +COMMITTED_HEAP_BYTES +COUNTERS +CPU time spent (ms) +CPU_MILLISECONDS +FAILED_SHUFFLE +FILE: Number of bytes read +FILE: Number of bytes written +FILE: Number of large read operations +FILE: Number of read operations +FILE: Number of write operations +FILE_BYTES_READ +FILE_BYTES_WRITTEN +FILE_LARGE_READ_OPS +FILE_READ_OPS +FILE_WRITE_OPS +GC time elapsed (ms) +GC_TIME_MILLIS +HDFS: Number of bytes read +HDFS: Number of bytes written +HDFS: Number of large read operations +HDFS: Number of read operations +HDFS: Number of write operations +HDFS_BYTES_READ +HDFS_BYTES_WRITTEN +HDFS_LARGE_READ_OPS +HDFS_READ_OPS +HDFS_WRITE_OPS +INITED +JOB_ERROR +JOB_FAILED +JOB_FINISHED +JOB_INFO_CHANGED +JOB_INITED +JOB_KILLED +JOB_PRIORITY_CHANGED +JOB_QUEUE_CHANGED +JOB_STATUS_CHANGED +JOB_SUBMITTED +KILLED +MAP +MAP_ATTEMPT_FAILED +MAP_ATTEMPT_FINISHED +MAP_ATTEMPT_KILLED +MAP_ATTEMPT_STARTED +MAP_COUNTERS +MAP_INPUT_RECORDS +MAP_OUTPUT_RECORDS +MB_MILLIS_MAPS +MERGED_MAP_OUTPUTS +MILLIS_MAPS +NORMALIZED_RESOURCE +NUM_KILLED_MAPS +OFF_SWITCH +OTHER_LOCAL_MAPS +PHYSICAL_MEMORY_BYTES +RECORDS_WRITTEN +REDUCE_ATTEMPT_FAILED +REDUCE_ATTEMPT_FINISHED +REDUCE_ATTEMPT_KILLED +REDUCE_ATTEMPT_STARTED +REDUCE_COUNTERS +SETUP_ATTEMPT_FAILED +SETUP_ATTEMPT_FINISHED +SETUP_ATTEMPT_KILLED +SETUP_ATTEMPT_STARTED +SLOTS_MILLIS_MAPS +SPECULATIVE +SPILLED_RECORDS +SPLIT_RAW_BYTES +SUCCEEDED +TASK_FAILED +TASK_FINISHED +TASK_STARTED +TASK_UPDATED +TOTAL_COUNTERS +TOTAL_LAUNCHED_MAPS +VCORES_MILLIS_MAPS +VIRGIN +VIRTUAL_MEMORY_BYTES +Bytes Read +Bytes Written +Failed Shuffles +File System Counters +Input split bytes +Killed map tasks +Launched map tasks +Map input records +Map output 
records +Map-Reduce Framework +Merged Map outputs +Other local map tasks +Physical memory (bytes) snapshot +Spilled Records +Total committed heap usage (bytes) +Total megabyte-seconds taken by all map tasks +Total time spent by all map tasks (ms) +Total time spent by all maps in occupied slots (ms) +Total vcore-seconds taken by all map tasks +Virtual memory (bytes) snapshot +org.apache.hadoop.examples.RandomWriter$Counters +string +EventType +Event +JhCounterGroup +JhCounter +JhCounters +JobFinished +JobInfoChange +JobInited +JobPriorityChange +JobQueueChange +JobStatusChanged +JobSubmitted +JobUnsuccessfulCompletion +MapAttemptFinished +ReduceAttemptFinished +TaskAttemptFinished +TaskAttemptStarted +TaskAttemptUnsuccessfulCompletion +TaskFailed +TaskFinished +TaskStarted +TaskUpdated +acls +applicationAttemptId +attemptId +avataar +clockSplits +containerId +counters +counts +cpuUsages +diagnostics +displayName +error +event +failedDueToAttempt +failedMaps +failedReduces +finishTime +finishedMaps +finishedReduces +groups +hostname +httpPort +jobConfPath +jobName +jobQueueName +jobStatus +jobid +launchTime +locality +mapCounters +mapFinishTime +name +nodeManagerHost +nodeManagerHttpPort +nodeManagerPort +org.apache.hadoop.mapreduce.FileSystemCounter +org.apache.hadoop.mapreduce.JobCounter +org.apache.hadoop.mapreduce.TaskCounter +org.apache.hadoop.mapreduce.jobhistory +org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter +org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter +physMemKbytes +port +priority +rackname +reduceCounters +shuffleFinishTime +shufflePort +sortFinishTime +splitLocations +startTime +state +status +submitTime +successfulAttemptId +taskStatus +taskType +taskid +totalCounters +totalMaps +totalReduces +trackerName +type +uberized +userName +vMemKbytes +value +workflowAdjacencies +workflowId +workflowName +workflowNodeName +workflowTags +array +boolean +enum +int +long +map +record +default +null +""") +# Add a few that have extra trailing spaces that may be confusingly removed by IDEs. 
+MAPREDUCE_SAFE_VALUES.add("File Input Format Counters ") +MAPREDUCE_SAFE_VALUES.add("File Output Format Counters ") +MAPREDUCE_SAFE_VALUES.add("Job Counters ") +XML_CONF_SAFE_KEYS = _make_set(""" +dfs.datanode.shared.file.descriptor.paths +dfs.domain.socket.path +hadoop.security.random.device.file.path +yarn.application.classpath +yarn.resourcemanager.ha.automatic-failover.zk-base-path +yarn.resourcemanager.zk-state-store.parent-path +yarn.timeline-service.leveldb-timeline-store.path +ha.zookeeper.acl +ha.zookeeper.parent-znode +hadoop.common.configuration.version +hadoop.http.authentication.type +hadoop.http.filter.initializers +hadoop.rpc.socket.factory.class.default +hadoop.security.crypto.codec.classes.aes.ctr.nopadding +hadoop.ssl.keystores.factory.class +hadoop.kerberos.kinit.command +io.compression.codec.bzip2.library +io.seqfile.local.dir +io.serializations +map.sort.class +mapreduce.job.inputformat.class +mapreduce.job.map.class +mapreduce.job.map.output.collector.class +mapreduce.job.output.key.class +mapreduce.job.output.value.class +mapreduce.job.outputformat.class +mapreduce.job.reduce.class +mapreduce.job.reduce.shuffle.consumer.plugin.class +mapreduce.jobhistory.recovery.store.class +mapreduce.local.clientfactory.class.name +net.topology.impl +net.topology.node.switch.mapping.impl +net.topology.script.file.name +nfs.dump.dir +nfs.exports.allowed.hosts +rpc.engine.org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB +rpc.engine.org.apache.hadoop.ipc.ProtocolMetaInfoPB +rpc.engine.org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB +yarn.ipc.rpc.class +yarn.nodemanager.aux-services.mapreduce_shuffle.class +yarn.nodemanager.aux-services.spark_shuffle.class +yarn.nodemanager.container-executor.class +yarn.nodemanager.linux-container-executor.resources-handler.class +yarn.resourcemanager.configuration.provider-class +yarn.resourcemanager.scheduler.class +yarn.resourcemanager.store.class +yarn.timeline-service.store-class +mapreduce.framework.name +yarn.app.mapreduce.am.command-opts +yarn.app.mapreduce.am.staging-dir +yarn.client.failover-proxy-provider +yarn.http.policy +yarn.nodemanager.admin-env +yarn.nodemanager.aux-services +yarn.nodemanager.docker-container-executor.exec-name +yarn.nodemanager.env-whitelist +yarn.nodemanager.linux-container-executor.cgroups.hierarchy +yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user +yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern +yarn.nodemanager.local-dirs +yarn.nodemanager.log-dirs +yarn.nodemanager.recovery.dir +yarn.nodemanager.remote-app-log-dir-suffix +yarn.nodemanager.remote-app-log-dir +yarn.nodemanager.resourcemanager.minimum.version +yarn.resourcemanager.fs.state-store.retry-policy-spec +yarn.resourcemanager.fs.state-store.uri +yarn.resourcemanager.nodemanager.minimum.version +yarn.resourcemanager.scheduler.monitor.policies +yarn.resourcemanager.state-store.max-completed-applications +yarn.resourcemanager.zk-acl +yarn.timeline-service.http-authentication.type +hadoop.security.group.mapping +hadoop.registry.jaas.context +hadoop.registry.system.acls +hadoop.registry.zk.root +hadoop.rpc.protection +hadoop.security.authentication +hadoop.security.crypto.cipher.suite +hadoop.security.java.secure.random.algorithm +hadoop.ssl.client.conf +hadoop.ssl.enabled.protocols +hadoop.ssl.server.conf +hadoop.util.hash.type +fs.AbstractFileSystem.file.impl +fs.AbstractFileSystem.har.impl +fs.AbstractFileSystem.hdfs.impl +fs.AbstractFileSystem.viewfs.impl +fs.s3a.impl +fs.swift.impl 
+dfs.client.https.keystore.resource +dfs.http.policy +dfs.https.server.keystore.resource +dfs.image.compression.codec +dfs.namenode.edits.journal-plugin.qjournal +dfs.namenode.top.windows.minutes +dfs.webhdfs.user.provider.user.pattern +mapreduce.client.output.filter +mapreduce.jobhistory.http.policy +mapreduce.jobhistory.jhist.format +mapreduce.jobtracker.instrumentation +mapreduce.map.log.level +mapreduce.map.output.compress.codec +mapreduce.output.fileoutputformat.compress.codec +mapreduce.output.fileoutputformat.compress.type +mapreduce.reduce.log.level +mapreduce.reduce.shuffle.fetch.retry.enabled +mapreduce.task.profile.map.params +mapreduce.task.profile.maps +mapreduce.task.profile.params +mapreduce.task.profile.reduce.params +mapreduce.task.profile.reduces +mapreduce.tasktracker.instrumentation +mapreduce.tasktracker.taskcontroller +""") +XML_CONF_SAFE_VALUES = _make_set(""" +true +false +* +none +localhost +localhost:2181 +default +DEFAULT +""") +XML_CONF_SAFE_VALUES.add(" ") + + +def _parse_args(): + """Parse command line args.""" + usage_str = textwrap.dedent(""" + Use this program to get a snapshot of MapReduce & Spark history data into a + local tarball. It should be called as a user that has access to the history + directories. Optionally, anonymization can be applied which will replace + sensitive data (hostnames, usernames, ...) with base64'd SHA-256 hashes of + the same data. + + Usage examples: + + # Do a basic run, getting the last 1000 MR + Spark jobs. + get_jobhistory_hdfs.py -o /tmp/jobhistory.tgz + + # Get the last 20 jobs, with anonymization. + get_jobhistory_hdfs.py -o /tmp/jobhistory.tgz -c 20 -a + + # Basic run, but running as the HDFS user for permission reasons. + sudo -u hdfs ./get_jobhistory_hdfs.py -o /tmp/jobhistory.tgz + """) + parser = argparse.ArgumentParser(description=usage_str, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument("-c", "--count", help="How many recent jobs to retrieve", type=int, + default=1000) + parser.add_argument("-d", "--mr-dir", help="History dir for mapreduce", + default="/user/history/done") + parser.add_argument("-s", "--spark-dir", help="History dir for spark", + default="/user/spark/applicationHistory") + parser.add_argument("-o", "--output-tarball", help="Output tarball name", + required=True) + parser.add_argument("-a", "--anonymize", action="store_true", help="Anonymize output") + return parser.parse_args() + + +def _get_recent_mapreduce_history_filenames(mapreduce_history_dir, count): + """Get recent mapreduce history files. + + @param str mapreduce_history_dir: The directory in HDFS that contains mapreduce history files. + @param int count: How many recent jobs to retrieve files for. + @rtype: collections.Iterable[str] + + Returns an iterable of file paths. + """ + # Note: the * 2 in the below is because there are two files per job. + return sorted(_yield_mapreduce_history_files(mapreduce_history_dir))[-count * 2:] + + +def _yield_mapreduce_history_files(mapreduce_history_dir): + """Get all mapreduce history files. + + @param str mapreduce_history_dir: The directory in HDFS that contains mapreduce history files. + @rtype: collections.Iterable[str] + + Returns an iterable of file paths.
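+ + Only paths whose basename starts with "job_" are yielded; each job is + expected to contribute two files, a .jhist event log and a job _conf.xml.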
+ """ + output = subprocess.check_output(["hdfs", "dfs", "-find", mapreduce_history_dir]) + for line in output.split("\n"): + if len(line) == 0: + continue + dirname, filename = os.path.split(line) + if not filename.startswith("job_"): + continue + yield line + + +def _get_recent_spark_history_filenames(spark_history_dir, count): + """Get recent spark history files. + + @param str spark_history_dir: The directory in HDFS that contains spark history files. + @param int count: How many files to retrieve. + @rtype: collections.Iterable[str] + + Returns an iterable of file paths. + """ + return (x[0] for x in + sorted(_yield_spark_history_files_and_times(spark_history_dir), + key=operator.itemgetter(1))[-count:]) + + +def _yield_spark_history_files_and_times(spark_history_dir): + """Get all spark history files and their times. + + @param str spark_history_dir: The directory in HDFS that contains spark history files. + @rtype: collections.Iterable[(str, str)] + + Returns an iterable of timestamp and file paths. + """ + output = subprocess.check_output(["hdfs", "dfs", "-ls", spark_history_dir + "/"]) + # ls output line: + # -rwxrwx--- 3 prod spark 76762 2017-01-28 10:55 /user/spark/applicationHisto... + line_regex = re.compile(r"^.*? (\d{4}-\d{2}-\d{2} \d{2}:\d{2}) (.*)$") + for line in output.split("\n"): + if len(line) == 0: + continue + if line.startswith("Found"): + continue + m = line_regex.match(line) + if not m: + print >> sys.stderr, "Couldn't parse jobhistory line {}".format(line) + else: + yield m.group(2), m.group(1) + + +def _get_hdfs_file(hdfs_path, local_dir): + """Get an hdfs file to a local path. + + @param str hdfs_path: The path on the HDFS to the file. + @param str local_dir: The local path where the file should be saved. + """ + print " Retrieving {}".format(hdfs_path) + subprocess.check_call(["hdfs", "dfs", "-get", hdfs_path, local_dir]) + + +def _anonymize_spark_file(path): + """Anonymize a spark job history file. + + @param str path: The local path to the file. + """ + with open(path) as f: + input_lines = f.readlines() + with open(path, "w") as f: + for input_line in input_lines: + data = json.loads(input_line) + data = _anonymize_spark_data(data) + print >>f, json.dumps(data) + + +def _anonymize_spark_data(data, parent_info=None): + """Anonymize a decoded json spark history object. + + @param object data: An object that may contain structure in dict or lists or be a scalar. + @param parent_info data: A parent information object, sometimes useful for anonymization + + @rtype: object + + Returns an anonymized version of the same structure. + """ + if isinstance(data, dict): + # TODO: Ensure key for value Added by User still gets anonymized. + return {k: _anonymize_spark_data(v, k) for k, v in data.iteritems()} + elif isinstance(data, list): + return [_anonymize_spark_data(v) for v in data] + elif isinstance(data, NUMBER_TYPES): + return data + elif isinstance(data, STRING_TYPES): + if parent_info in SPARK_SAFE_PARENTS: + return data + if data in SPARK_SAFE_VALUES: + return data + try: + int(data) + # Must be an integer + return data + except ValueError: + # Must not have been an integer + pass + return _anonymize_str(data) + else: + raise NotImplementedError("Unknown anonymization type {}".format(data)) + + +def _anonymize_mapreduce_file(path): + """Anonymize a mapreduce history file. + + @param str path: The local path to the file. 
+ """ + with open(path) as f: + input_lines = f.readlines() + with open(path, "w") as f: + print >>f, input_lines[0].rstrip() + for input_line in input_lines[1:]: + if input_line != "\n": + data = json.loads(input_line) + data = _anonymize_mapreduce_data(data) + print >>f, json.dumps(data) + else: + print >>f, "" + + +def _anonymize_mapreduce_data(data, parent_info=None): + """Anonymize a decoded json mapreduce job history object. + + @param object data: An object that may contain structure in dict or lists or be a scalar. + @param parent_info data: A parent information object, sometimes useful for anonymization + + @rtype: object + + Returns an anonymized version of the same structure. + """ + if isinstance(data, dict): + return {k: _anonymize_mapreduce_data(v, k) for k, v in data.iteritems()} + elif isinstance(data, list): + return [_anonymize_mapreduce_data(v) for v in data] + elif isinstance(data, NUMBER_TYPES): + return data + elif isinstance(data, STRING_TYPES): + if parent_info in MAPREDUCE_SAFE_PARENTS: + return data + if data in MAPREDUCE_SAFE_VALUES: + return data + if _is_str_number(data): + return data + return _anonymize_str(data) + elif data is None: + return None + else: + raise NotImplementedError("Unknown anonymization type [{}]".format(data)) + + +def _anonymize_xml_conf_file(path): + """Anonymize an configuration (xml) file. + + @param str path: The local path to the file. + """ + tree = etree.parse(path) + root = tree.getroot() + for sub_element in root: + if sub_element.find("name") is None: + continue + name = sub_element.find("name").text + value = sub_element.find("value").text + if _is_str_number(value): + continue + if name in XML_CONF_SAFE_KEYS: + continue + if value in XML_CONF_SAFE_VALUES: + continue + new_value = _anonymize_str(value) + sub_element.find("value").text = new_value + tree.write(path) + + +def _anonymize_str(plaintext): + """Anonymize a given string. + + @param str plaintext: The plaintext string to anonymize. + @rtype: str + """ + hasher = hashlib.sha256() + hasher.update(plaintext) + digest = hasher.digest() + b64hashtext = base64.urlsafe_b64encode(digest) + return b64hashtext[:len(plaintext)] + + +def _is_str_number(s): + """Return if a given string is a number. + + @param str s: The string to test. + @rtype: bool + """ + try: + float(s) + return True + except ValueError: + pass + if s[-1] in ('d', 'f'): + return _is_str_number(s[:-1]) + return False + + +def _tar_dir(local_dir, output_tarball): + """Compress a dir into a tarball. + + @param str local_dir: The local directory to put into the tarball. + @param str output_dirball: The path to the file to output. + """ + current_dir = os.getcwd() + output_tarball_absolute = os.path.join(current_dir, output_tarball) + try: + os.chdir(local_dir) + subprocess.check_call(["tar", "czf" if output_tarball.endswith("gz") else "cf", + output_tarball_absolute, "."]) + finally: + os.chdir(current_dir) + print "Wrote output tarball {}".format(output_tarball) + + +def _test_create_file(path): + """Test that a given path can be created. + + @param str path: The path to try to create. + """ + with open(path, "w"): + pass + os.unlink(path) + + +def _main(): + """Main function, separate for testing.""" + args = _parse_args() + # Fail fast on permission errors for creating the output tarball. 
+ _test_create_file(args.output_tarball) + tmpdir = tempfile.mkdtemp() + try: + local_spark_dir = os.path.join(tmpdir, "spark") + os.makedirs(local_spark_dir) + print "Retrieving {} Spark jobs into {}".format(args.count, local_spark_dir) + for history_file in _get_recent_spark_history_filenames(args.spark_dir, args.count): + _get_hdfs_file(history_file, local_spark_dir) + if args.anonymize: + local_filename = os.path.join(local_spark_dir, os.path.basename(history_file)) + _anonymize_spark_file(local_filename) + + local_mapreduce_dir = os.path.join(tmpdir, "mapreduce") + os.makedirs(local_mapreduce_dir) + print "Retrieving {} MapReduce jobs into {}".format(args.count, local_mapreduce_dir) + for history_file in _get_recent_mapreduce_history_filenames(args.mr_dir, args.count): + _get_hdfs_file(history_file, local_mapreduce_dir) + if args.anonymize: + local_filename = os.path.join(local_mapreduce_dir, os.path.basename(history_file)) + if local_filename.endswith(".jhist"): + _anonymize_mapreduce_file(local_filename) + else: + _anonymize_xml_conf_file(local_filename) + + _tar_dir(tmpdir, args.output_tarball) + finally: + shutil.rmtree(tmpdir) + return 0 + + +if __name__ == "__main__": + sys.exit(_main()) diff --git a/test/resources/SchedulerConf.xml b/test/resources/SchedulerConf.xml new file mode 100644 index 000000000..c4a29f5e3 --- /dev/null +++ b/test/resources/SchedulerConf.xml @@ -0,0 +1,42 @@ + + + + + + + + airflow + com.linkedin.drelephant.schedulers.AirflowScheduler + + http://localhost:8000 + + + + + azkaban + com.linkedin.drelephant.schedulers.AzkabanScheduler + + + + oozie + com.linkedin.drelephant.schedulers.OozieScheduler + + http://oozie/ + + + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest1.xml b/test/resources/configurations/fetcher/FetcherConfTest1.xml new file mode 100644 index 000000000..e69e71c6b --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest1.xml @@ -0,0 +1,27 @@ + + + + + + mapreduce + com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2 + + + spark + org.apache.spark.deploy.history.SparkFSFetcher + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest10.xml b/test/resources/configurations/fetcher/FetcherConfTest10.xml new file mode 100644 index 000000000..ec593f782 --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest10.xml @@ -0,0 +1,28 @@ + + + + + + mapreduce + com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2 + + true + 200 + PST + + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest11.xml b/test/resources/configurations/fetcher/FetcherConfTest11.xml new file mode 100644 index 000000000..432d731a6 --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest11.xml @@ -0,0 +1,27 @@ + + + + + + mapreduce + com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2 + + + + + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest2.xml b/test/resources/configurations/fetcher/FetcherConfTest2.xml new file mode 100644 index 000000000..b025abe0f --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest2.xml @@ -0,0 +1,27 @@ + + + + + + mapreduce + com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2 + + + spark + org.apache.spark.deploy.history.SparkFSFetcher + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest3.xml b/test/resources/configurations/fetcher/FetcherConfTest3.xml new file mode 100644 index 000000000..46abe5bc1 --- /dev/null +++
b/test/resources/configurations/fetcher/FetcherConfTest3.xml @@ -0,0 +1,23 @@ + + + + + + mapreduce + + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest4.xml b/test/resources/configurations/fetcher/FetcherConfTest4.xml new file mode 100644 index 000000000..bcb7ebe29 --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest4.xml @@ -0,0 +1,22 @@ + + + + + + com.linkedin.drelephant.mapreduce.fetchers.MapReduceFetcherHadoop2 + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest5.xml b/test/resources/configurations/fetcher/FetcherConfTest5.xml new file mode 100644 index 000000000..2372f0828 --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest5.xml @@ -0,0 +1,25 @@ + + + + + + spark + com.linkedin.drelephant.spark.fetchers.SparkFetcher + + + + diff --git a/test/resources/configurations/fetcher/FetcherConfTest9.xml b/test/resources/configurations/fetcher/FetcherConfTest9.xml new file mode 100644 index 000000000..ef7c2c0be --- /dev/null +++ b/test/resources/configurations/fetcher/FetcherConfTest9.xml @@ -0,0 +1,23 @@ + + + + + + mapreduce + com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2 + + diff --git a/test/resources/configurations/heuristic/HeuristicConfTest1.xml b/test/resources/configurations/heuristic/HeuristicConfTest1.xml new file mode 100644 index 000000000..47f4e94f6 --- /dev/null +++ b/test/resources/configurations/heuristic/HeuristicConfTest1.xml @@ -0,0 +1,48 @@ + + + + + + + + + mapreduce + Mapper Data Skew + com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic + views.html.help.mapreduce.helpMapperDataSkew + + 10, 50, 100, 200 + 2, 4, 8, 16 + 1/8, 1/4, 1/2, 1 + + + + + mapreduce + Mapper GC + com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic + views.html.help.mapreduce.helpGC + + + + mapreduce + Mapper Time + com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic + views.html.help.mapreduce.helpMapperTime + + + \ No newline at end of file diff --git a/test/resources/configurations/heuristic/HeuristicConfTest2.xml b/test/resources/configurations/heuristic/HeuristicConfTest2.xml new file mode 100644 index 000000000..acb1822bd --- /dev/null +++ b/test/resources/configurations/heuristic/HeuristicConfTest2.xml @@ -0,0 +1,48 @@ + + + + + + + + + mapreduce + Mapper Data Skew + com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic + views.html.help.mapreduce.helpMapperDataSkew + + 10, 50, 100, 200 + 2, 4, 8, 16 + 1/8, 1/4, 1/2, 1 + + + + + mapreduce + Mapper GC + com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic + views.html.help.mapreduce.helpGC + + + + mapreduce + Mapper Time + com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic + views.html.help.mapreduce.helpMapperTime + + + diff --git a/test/resources/configurations/heuristic/HeuristicConfTest3.xml b/test/resources/configurations/heuristic/HeuristicConfTest3.xml new file mode 100644 index 000000000..2af761905 --- /dev/null +++ b/test/resources/configurations/heuristic/HeuristicConfTest3.xml @@ -0,0 +1,48 @@ + + + + + + + + + mapreduce + Mapper Data Skew + com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic + views.html.help.mapreduce.helpMapperDataSkew + + 10, 50, 100, 200 + 2, 4, 8, 16 + 1/8, 1/4, 1/2, 1 + + + + + mapreduce + Mapper GC + com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic + views.html.help.mapreduce.helpGC + + + + mapreduce + Mapper Time + com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic + 
views.html.help.mapreduce.helpMapperTime + + + \ No newline at end of file diff --git a/test/resources/configurations/heuristic/HeuristicConfTest4.xml b/test/resources/configurations/heuristic/HeuristicConfTest4.xml new file mode 100644 index 000000000..7aae55de1 --- /dev/null +++ b/test/resources/configurations/heuristic/HeuristicConfTest4.xml @@ -0,0 +1,48 @@ + + + + + + + + + mapreduce + Mapper Data Skew + com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic + views.html.help.mapreduce.helpMapperDataSkew + + 10, 50, 100, 200 + 2, 4, 8, 16 + 1/8, 1/4, 1/2, 1 + + + + + mapreduce + Mapper GC + com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic + views.html.help.mapreduce.helpGC + + + + mapreduce + Mapper Time + com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic + views.html.help.mapreduce.helpMapperTime + + + \ No newline at end of file diff --git a/test/resources/configurations/heuristic/HeuristicConfTest5.xml b/test/resources/configurations/heuristic/HeuristicConfTest5.xml new file mode 100644 index 000000000..358a652e0 --- /dev/null +++ b/test/resources/configurations/heuristic/HeuristicConfTest5.xml @@ -0,0 +1,47 @@ + + + + + + + + + mapreduce + Mapper Data Skew + com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic + views.html.help.mapreduce.helpMapperDataSkew + + 10, 50, 100, 200 + 2, 4, 8, 16 + 1/8, 1/4, 1/2, 1 + + + + + Mapper GC + com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic + views.html.help.mapreduce.helpGC + + + + mapreduce + Mapper Time + com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic + views.html.help.mapreduce.helpMapperTime + + + \ No newline at end of file diff --git a/test/resources/configurations/jobtype/JobTypeConfTest1.xml b/test/resources/configurations/jobtype/JobTypeConfTest1.xml new file mode 100644 index 000000000..aaff92137 --- /dev/null +++ b/test/resources/configurations/jobtype/JobTypeConfTest1.xml @@ -0,0 +1,46 @@ + + + + + + Spark + spark + spark.app.id + + + + Pig + mapreduce + pig.script + + + Hive + mapreduce + hive.mapred.mode + + + Cascading + mapreduce + cascading.app.frameworks + + + HadoopJava + mapreduce + mapred.child.java.opts + + + diff --git a/test/resources/configurations/jobtype/JobTypeConfTest2.xml b/test/resources/configurations/jobtype/JobTypeConfTest2.xml new file mode 100644 index 000000000..760a11997 --- /dev/null +++ b/test/resources/configurations/jobtype/JobTypeConfTest2.xml @@ -0,0 +1,46 @@ + + + + + + + Spark + spark + spark.app.id + + + + Pig + mapreduce + pig.script + + + mapreduce + hive.mapred.mode + + + Cascading + mapreduce + cascading.app.frameworks + + + HadoopJava + mapreduce + mapred.child.java.opts + + + \ No newline at end of file diff --git a/test/resources/configurations/jobtype/JobTypeConfTest3.xml b/test/resources/configurations/jobtype/JobTypeConfTest3.xml new file mode 100644 index 000000000..9da172cd7 --- /dev/null +++ b/test/resources/configurations/jobtype/JobTypeConfTest3.xml @@ -0,0 +1,45 @@ + + + + + + + Spark + spark + + + + Pig + mapreduce + + + Hive + mapreduce + hive.mapred.mode + + + Cascading + mapreduce + cascading.app.frameworks + + + HadoopJava + mapreduce + mapred.child.java.opts + + + \ No newline at end of file diff --git a/test/resources/configurations/jobtype/JobTypeConfTest4.xml b/test/resources/configurations/jobtype/JobTypeConfTest4.xml new file mode 100644 index 000000000..b02b767ad --- /dev/null +++ b/test/resources/configurations/jobtype/JobTypeConfTest4.xml @@ -0,0 +1,45 @@ + + + + + + + Spark + 
spark + spark.app.id + + + + Pig + pig.script + + + Hive + hive.mapred.mode + + + Cascading + mapreduce + cascading.app.frameworks + + + HadoopJava + mapreduce + mapred.child.java.opts + + + \ No newline at end of file diff --git a/test/resources/configurations/jobtype/JobTypeConfTest5.xml b/test/resources/configurations/jobtype/JobTypeConfTest5.xml new file mode 100644 index 000000000..b1f7953ae --- /dev/null +++ b/test/resources/configurations/jobtype/JobTypeConfTest5.xml @@ -0,0 +1,26 @@ + + + + + + Voldemort + mapreduce + mapred.reducer.class + [(voldemort) + + + diff --git a/test/resources/configurations/jobtype/JobTypeConfTest6.xml b/test/resources/configurations/jobtype/JobTypeConfTest6.xml new file mode 100644 index 000000000..7878dfaaf --- /dev/null +++ b/test/resources/configurations/jobtype/JobTypeConfTest6.xml @@ -0,0 +1,48 @@ + + + + + + Spark + spark + spark.app.id + + + + Pig + mapreduce + pig.script + + + + Hive + mapreduce + hive.mapred.mode + + + + Cascading + mapreduce + cascading.app.frameworks + + + HadoopJava + mapreduce + mapred.child.java.opts + + + diff --git a/test/resources/configurations/scheduler/SchedulerConfTest1.xml b/test/resources/configurations/scheduler/SchedulerConfTest1.xml new file mode 100644 index 000000000..ac710874c --- /dev/null +++ b/test/resources/configurations/scheduler/SchedulerConfTest1.xml @@ -0,0 +1,33 @@ + + + + + + + airflow + com.linkedin.drelephant.schedulers.AirflowScheduler + + http://localhost:8000 + + + + + azkaban + com.linkedin.drelephant.schedulers.AzkabanScheduler + + + diff --git a/test/resources/configurations/scheduler/SchedulerConfTest2.xml b/test/resources/configurations/scheduler/SchedulerConfTest2.xml new file mode 100644 index 000000000..cc64a051f --- /dev/null +++ b/test/resources/configurations/scheduler/SchedulerConfTest2.xml @@ -0,0 +1,32 @@ + + + + + + + airflow + + http://localhost:8000 + + + + + azkaban + com.linkedin.drelephant.schedulers.AzkabanScheduler + + + diff --git a/test/resources/configurations/scheduler/SchedulerConfTest3.xml b/test/resources/configurations/scheduler/SchedulerConfTest3.xml new file mode 100644 index 000000000..1b524d59b --- /dev/null +++ b/test/resources/configurations/scheduler/SchedulerConfTest3.xml @@ -0,0 +1,32 @@ + + + + + + + airflow + com.linkedin.drelephant.schedulers.AirflowScheduler + + http://localhost:8000 + + + + + com.linkedin.drelephant.schedulers.AzkabanScheduler + + + diff --git a/test/resources/core-site.xml b/test/resources/core-site.xml new file mode 100644 index 000000000..403589589 --- /dev/null +++ b/test/resources/core-site.xml @@ -0,0 +1,53 @@ + + + + + + + + + + + mapreduce.framework.name + yarn + + + + yarn.resourcemanager.webapp.address + localhost:19888 + + + + dfs.nameservices + sample + + + + dfs.ha.namenodes.sample + ha1,ha2 + + + + dfs.namenode.http-address.sample.ha1 + sample-ha1.grid.company.com:50070 + + + + dfs.namenode.http-address.sample.ha2 + sample-ha2.grid.company.com:50070 + + + + diff --git a/test/resources/mrdata/mapperTaskCounter1.properties b/test/resources/mrdata/mapperTaskCounter1.properties new file mode 100644 index 000000000..424b12fb4 --- /dev/null +++ b/test/resources/mrdata/mapperTaskCounter1.properties @@ -0,0 +1,3 @@ +org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0} 
+org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118} +org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=46, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592} \ No newline at end of file diff --git a/test/resources/mrdata/mapperTaskCounter2.properties b/test/resources/mrdata/mapperTaskCounter2.properties new file mode 100644 index 000000000..424b12fb4 --- /dev/null +++ b/test/resources/mrdata/mapperTaskCounter2.properties @@ -0,0 +1,3 @@ +org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0} +org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118} +org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=46, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592} \ No newline at end of file diff --git a/test/resources/mrdata/mapperTaskCounter3.properties b/test/resources/mrdata/mapperTaskCounter3.properties new file mode 100644 index 000000000..239c26f0d --- /dev/null +++ b/test/resources/mrdata/mapperTaskCounter3.properties @@ -0,0 +1,3 @@ +org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0} +org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118} +org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=43, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592} \ No newline at end of file diff --git a/test/resources/mrdata/reducerTaskCounter1.properties b/test/resources/mrdata/reducerTaskCounter1.properties new file mode 100644 index 000000000..00f316618 --- /dev/null +++ b/test/resources/mrdata/reducerTaskCounter1.properties @@ -0,0 +1,4 @@ +org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=3, HDFS_BYTES_READ=0, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117847, FILE_BYTES_READ=72, HDFS_WRITE_OPS=3, HDFS_BYTES_WRITTEN=215} +org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter={BYTES_WRITTEN=97} +org.apache.hadoop.mapreduce.TaskCounter={REDUCE_INPUT_RECORDS=6, SPILLED_RECORDS=6, MERGED_MAP_OUTPUTS=3, VIRTUAL_MEMORY_BYTES=0, FAILED_SHUFFLE=0, REDUCE_SHUFFLE_BYTES=84, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=38, REDUCE_INPUT_GROUPS=2, COMBINE_OUTPUT_RECORDS=0, SHUFFLED_MAPS=3, REDUCE_OUTPUT_RECORDS=0, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=147849216} +Shuffle Errors={CONNECTION=0, WRONG_LENGTH=0, BAD_ID=0, WRONG_REDUCE=0, IO_ERROR=0, WRONG_MAP=0} \ No newline at end of file diff --git 
a/test/resources/mrdata/sampleJobConf.properties b/test/resources/mrdata/sampleJobConf.properties new file mode 100644 index 000000000..4b9781d6c --- /dev/null +++ b/test/resources/mrdata/sampleJobConf.properties @@ -0,0 +1,4 @@ +azkaban.link.job.url=https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2 +azkaban.link.attempt.url=https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0 +azkaban.link.workflow.url=https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder +azkaban.link.execution.url=https://elephant.linkedin.com:8443/executor?execid=1654676 diff --git a/test/resources/mrdata/sampleJobCounter.properties b/test/resources/mrdata/sampleJobCounter.properties new file mode 100644 index 000000000..12f261dbd --- /dev/null +++ b/test/resources/mrdata/sampleJobCounter.properties @@ -0,0 +1,6 @@ +org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=15, HDFS_BYTES_READ=804, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=471421, FILE_BYTES_READ=72, HDFS_WRITE_OPS=3, HDFS_BYTES_WRITTEN=215} +org.apache.hadoop.mapreduce.JobCounter={TOTAL_LAUNCHED_MAPS=3, VCORES_MILLIS_REDUCES=1870, MB_MILLIS_MAPS=7876608, TOTAL_LAUNCHED_REDUCES=1, SLOTS_MILLIS_REDUCES=1870, VCORES_MILLIS_MAPS=7692, MB_MILLIS_REDUCES=1914880, SLOTS_MILLIS_MAPS=7692, MILLIS_REDUCES=1870, MILLIS_MAPS=7692, DATA_LOCAL_MAPS=3} +org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter={BYTES_WRITTEN=97} +org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=354} +org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=84, REDUCE_INPUT_RECORDS=6, SPILLED_RECORDS=12, MERGED_MAP_OUTPUTS=3, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=3, SPLIT_RAW_BYTES=450, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=54, REDUCE_SHUFFLE_BYTES=84, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=173, REDUCE_INPUT_GROUPS=2, COMBINE_OUTPUT_RECORDS=0, SHUFFLED_MAPS=3, REDUCE_OUTPUT_RECORDS=0, MAP_OUTPUT_RECORDS=6, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=751828992} +Shuffle Errors={CONNECTION=0, WRONG_LENGTH=0, BAD_ID=0, WRONG_REDUCE=0, IO_ERROR=0, WRONG_MAP=0} \ No newline at end of file diff --git a/test/resources/spark-defaults.conf b/test/resources/spark-defaults.conf new file mode 100644 index 000000000..de9cc375a --- /dev/null +++ b/test/resources/spark-defaults.conf @@ -0,0 +1,4 @@ +spark.yarn.historyServer.address = jh1.grid.example.com:18080 +spark.eventLog.enabled = true +spark.eventLog.compress = true +spark.eventLog.dir = hdfs://nn1.grid.example.com:9000/logs/spark diff --git a/test/resources/spark_event_logs/event_log_2 b/test/resources/spark_event_logs/event_log_2 new file mode 100644 index 000000000..35101daa4 --- /dev/null +++ b/test/resources/spark_event_logs/event_log_2 @@ -0,0 +1,4 @@ +{"Event":"SparkListenerLogStart","Spark Version":"1.4.1"} +{"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor ID":"driver","Host":"127.0.0.1","Port":54157},"Maximum Memory":515411804,"Timestamp":1475761114342} +{"Event":"SparkListenerEnvironmentUpdate","Spark Properties":{"spark.serializer":"org.apache.spark.serializer.KryoSerializer","spark.storage.memoryFraction":"0.3","spark.driver.memory":"2G","spark.executor.instances":"900","spark.executor.memory":"1g","spark.shuffle.memoryFraction":"0.5"},"JVM Information":{},"System Properties":{},"Classpath Entries":{}} 
+{"Event":"SparkListenerApplicationStart","App Name":"app","App ID":"application_1","Timestamp":1475761112578,"User":"foo","App Attempt ID":"1"} diff --git a/test/resources/test-init.sql b/test/resources/test-init.sql new file mode 100644 index 000000000..4cb8a72c1 --- /dev/null +++ b/test/resources/test-init.sql @@ -0,0 +1,8 @@ +insert into yarn_app_result + (id,name,username,queue_name,start_time,finish_time,tracking_url,job_type,severity,score,workflow_depth,scheduler,job_name,job_exec_id,flow_exec_id,job_def_id,flow_def_id,job_exec_url,flow_exec_url,job_def_url,flow_def_url,resource_used,resource_wasted,total_delay) values + ('application_1458194917883_1453361','Email Overwriter','growth','misc_default',1460980616502,1460980723925,'http://elephant.linkedin.com:19888/jobhistory/job/job_1458194917883_1453361','HadoopJava',0,0,0,'azkaban','overwriter-reminder2','https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0','https://elephant.linkedin.com:8443/executor?execid=1654676','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder','https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0','https://elephant.linkedin.com:8443/executor?execid=1654676','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder', 100, 30, 20), + ('application_1458194917883_1453362','Email Overwriter','metrics','misc_default',1460980823925,1460980923925,'http://elephant.linkedin.com:19888/jobhistory/job/job_1458194917883_1453362','HadoopJava',0,0,0,'azkaban','overwriter-reminder2','https://elephant.linkedin.com:8443/executor?execid=1654677&job=overwriter-reminder2&attempt=0','https://elephant.linkedin.com:8443/executor?execid=1654677','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder','https://elephant.linkedin.com:8443/executor?execid=1654677&job=overwriter-reminder2&attempt=0','https://elephant.linkedin.com:8443/executor?execid=1654677','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2','https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder', 200, 40, 10); + +insert into yarn_app_heuristic_result(id,yarn_app_result_id,heuristic_class,heuristic_name,severity,score) values (137594512,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic','Mapper Data Skew',0,0), (137594513,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic','Mapper GC',0,0), (137594516,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic','Mapper Time',0,0), (137594520,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.MapperSpeedHeuristic','Mapper Speed',0,0), (137594523,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.MapperSpillHeuristic','Mapper Spill',0,0), (137594525,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristic','Mapper Memory',0,0), 
(137594530,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.ReducerDataSkewHeuristic','Reducer Data Skew',0,0), (137594531,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.ReducerGCHeuristic','Reducer Time',0,0), (137594534,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.ReducerTimeHeuristic','Reducer GC',0,0), (137594537,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristic','Reducer Memory',0,0), (137594540,'application_1458194917883_1453361','com.linkedin.drelephant.mapreduce.heuristics.ShuffleSortHeuristic','Shuffle & Sort',0,0), (137594612,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic','Mapper Data Skew',0,0), (137594613,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic','Mapper GC',0,0), (137594616,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic','Mapper Time',0,0), (137594620,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.MapperSpeedHeuristic','Mapper Speed',0,0), (137594623,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.MapperSpillHeuristic','Mapper Spill',0,0), (137594625,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristic','Mapper Memory',0,0), (137594630,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.ReducerDataSkewHeuristic','Reducer Data Skew',0,0), (137594631,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.ReducerGCHeuristic','Reducer Time',0,0), (137594634,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.ReducerTimeHeuristic','Reducer GC',0,0), (137594637,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristic','Reducer Memory',0,0), (137594640,'application_1458194917883_1453362','com.linkedin.drelephant.mapreduce.heuristics.ShuffleSortHeuristic','Shuffle & Sort',0,0); + +insert into yarn_app_heuristic_result_details (yarn_app_heuristic_result_id,name,value,details) values (137594512,'Group A','1 tasks @ 4 MB avg','NULL'), (137594512,'Group B','1 tasks @ 79 MB avg','NULL'), (137594512,'Number of tasks','2','NULL'), (137594513,'Avg task CPU time (ms)','11510','NULL'), (137594513,'Avg task GC time (ms)','76','NULL'), (137594513,'Avg task runtime (ms)','11851','NULL'), (137594513,'Number of tasks','2','NULL'), (137594513,'Task GC/CPU ratio','0.006602953953084275 ','NULL'), (137594516,'Average task input size','42 MB','NULL'), (137594516,'Average task runtime','11 sec','NULL'), (137594516,'Max task runtime','12 sec','NULL'), (137594516,'Min task runtime','11 sec','NULL'), (137594516,'Number of tasks','2','NULL'), (137594520,'Median task input size','42 MB','NULL'), (137594520,'Median task runtime','11 sec','NULL'), (137594520,'Median task speed','3 MB/s','NULL'), (137594520,'Number of tasks','2','NULL'), (137594523,'Avg output records per task','56687','NULL'), (137594523,'Avg spilled records per task','79913','NULL'), (137594523,'Number of tasks','2','NULL'), (137594523,'Ratio of spilled records to output records','1.4097111356119074','NULL'), (137594525,'Avg Physical Memory (MB)','522','NULL'), (137594525,'Avg task runtime','11 sec','NULL'), (137594525,'Avg Virtual Memory (MB)','3307','NULL'), 
(137594525,'Max Physical Memory (MB)','595','NULL'), (137594525,'Min Physical Memory (MB)','449','NULL'), (137594525,'Number of tasks','2','NULL'), (137594525,'Requested Container Memory','2 GB','NULL'), (137594530,'Group A','11 tasks @ 868 KB avg','NULL'), (137594530,'Group B','9 tasks @ 883 KB avg ','NULL'), (137594530,'Number of tasks','20','NULL'), (137594531,'Avg task CPU time (ms)','8912','NULL'), (137594531,'Avg task GC time (ms)','73','NULL'), (137594531,'Avg task runtime (ms)','11045','NULL'), (137594531,'Number of tasks','20','NULL'), (137594531,'Task GC/CPU ratio','0.008191202872531419 ','NULL'), (137594534,'Average task runtime','11 sec','NULL'), (137594534,'Max task runtime','14 sec','NULL'), (137594534,'Min task runtime','8 sec','NULL'), (137594534,'Number of tasks','20','NULL'), (137594537,'Avg Physical Memory (MB)','416','NULL'), (137594537,'Avg task runtime','11 sec','NULL'), (137594537,'Avg Virtual Memory (MB)','3326','NULL'), (137594537,'Max Physical Memory (MB)','497','NULL'), (137594537,'Min Physical Memory (MB)','354','NULL'), (137594537,'Number of tasks','20','NULL'), (137594537,'Requested Container Memory','2 GB','NULL'), (137594540,'Average code runtime','1 sec','NULL'), (137594540,'Average shuffle time','9 sec (5.49x)','NULL'), (137594540,'Average sort time','(0.04x)','NULL'), (137594540,'Number of tasks','20','NULL'), (137594612,'Group A','1 tasks @ 4 MB avg','NULL'), (137594612,'Group B','1 tasks @ 79 MB avg','NULL'), (137594612,'Number of tasks','2','NULL'), (137594613,'Avg task CPU time (ms)','11510','NULL'), (137594613,'Avg task GC time (ms)','76','NULL'), (137594613,'Avg task runtime (ms)','11851','NULL'), (137594613,'Number of tasks','2','NULL'), (137594613,'Task GC/CPU ratio','0.006602953953084275 ','NULL'), (137594616,'Average task input size','42 MB','NULL'), (137594616,'Average task runtime','11 sec','NULL'), (137594616,'Max task runtime','12 sec','NULL'), (137594616,'Min task runtime','11 sec','NULL'), (137594616,'Number of tasks','2','NULL'), (137594620,'Median task input size','42 MB','NULL'), (137594620,'Median task runtime','11 sec','NULL'), (137594620,'Median task speed','3 MB/s','NULL'), (137594620,'Number of tasks','2','NULL'), (137594623,'Avg output records per task','56687','NULL'), (137594623,'Avg spilled records per task','79913','NULL'), (137594623,'Number of tasks','2','NULL'), (137594623,'Ratio of spilled records to output records','1.4097111356119074','NULL'), (137594625,'Avg Physical Memory (MB)','522','NULL'), (137594625,'Avg task runtime','11 sec','NULL'), (137594625,'Avg Virtual Memory (MB)','3307','NULL'), (137594625,'Max Physical Memory (MB)','595','NULL'), (137594625,'Min Physical Memory (MB)','449','NULL'), (137594625,'Number of tasks','2','NULL'), (137594625,'Requested Container Memory','2 GB','NULL'), (137594630,'Group A','11 tasks @ 868 KB avg','NULL'), (137594630,'Group B','9 tasks @ 883 KB avg ','NULL'), (137594630,'Number of tasks','20','NULL'), (137594631,'Avg task CPU time (ms)','8912','NULL'), (137594631,'Avg task GC time (ms)','73','NULL'), (137594631,'Avg task runtime (ms)','11045','NULL'), (137594631,'Number of tasks','20','NULL'), (137594631,'Task GC/CPU ratio','0.008191202872531419 ','NULL'), (137594634,'Average task runtime','11 sec','NULL'), (137594634,'Max task runtime','14 sec','NULL'), (137594634,'Min task runtime','8 sec','NULL'), (137594634,'Number of tasks','20','NULL'), (137594637,'Avg Physical Memory (MB)','416','NULL'), (137594637,'Avg task runtime','11 sec','NULL'), (137594637,'Avg Virtual Memory 
(MB)','3326','NULL'), (137594637,'Max Physical Memory (MB)','497','NULL'), (137594637,'Min Physical Memory (MB)','354','NULL'), (137594637,'Number of tasks','20','NULL'), (137594637,'Requested Container Memory','2 GB','NULL'), (137594640,'Average code runtime','1 sec','NULL'), (137594640,'Average shuffle time','9 sec (5.49x)','NULL'), (137594640,'Average sort time','(0.04x)','NULL'), (137594640,'Number of tasks','20','NULL'); diff --git a/test/rest/RestAPITest.java b/test/rest/RestAPITest.java new file mode 100644 index 000000000..9e055ee4a --- /dev/null +++ b/test/rest/RestAPITest.java @@ -0,0 +1,821 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package rest; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.drelephant.util.Utils; +import common.DBTestUtil; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import play.Application; +import play.GlobalSettings; +import play.libs.WS; +import play.test.FakeApplication; + +import static common.DBTestUtil.*; +import static common.TestConstants.*; +import static org.junit.Assert.assertTrue; +import static play.test.Helpers.fakeApplication; +import static play.test.Helpers.running; +import static play.test.Helpers.testServer; + + +/** + *
+ * Class aims to exercise all the REST end points exposed by Dr. Elephant.
+ *
+ * A fake application connecting to an in-memory H2 DB is started inside
+ * the test server which runs the test code. The global class is overridden
+ * so that we don't have to go through the regular application start flow.
+ */ +public class RestAPITest { + + private static final Logger logger = LoggerFactory.getLogger(RestAPITest.class); + private static FakeApplication fakeApp; + + @Before + public void setup() { + Map dbConn = new HashMap(); + dbConn.put(DB_DEFAULT_DRIVER_KEY, DB_DEFAULT_DRIVER_VALUE); + dbConn.put(DB_DEFAULT_URL_KEY, DB_DEFAULT_URL_VALUE); + dbConn.put(EVOLUTION_PLUGIN_KEY, EVOLUTION_PLUGIN_VALUE); + dbConn.put(APPLY_EVOLUTIONS_DEFAULT_KEY, APPLY_EVOLUTIONS_DEFAULT_VALUE); + + GlobalSettings gs = new GlobalSettings() { + @Override + public void onStart(Application app) { + logger.info("Starting FakeApplication"); + } + }; + + fakeApp = fakeApplication(dbConn, gs); + } + + /** + *
+ * Rest API - Performs search by job ID.
+ * API provides information on the specific job.
+ *
+ * Following assertions are made in the response json:
+ *   - Job id
+ *   - Job name
+ *   - Job type
+ */ + @Test + public void testrestAppResult() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_APP_RESULT_PATH). + setQueryParameter("id", TEST_JOB_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + final JsonNode jsonResponse = response.asJson(); + assertTrue("Job id did not match", TEST_JOB_ID1.equals(jsonResponse.path("id").asText())); + assertTrue("Job name did not match", TEST_JOB_NAME.equals(jsonResponse.path("name").asText())); + assertTrue("Job type did not match", TEST_JOB_TYPE.equals(jsonResponse.path("jobType").asText())); + } + }); + } + + /** + *
+ * Rest API - Performs search by job execution ID.
+ * API returns all jobs triggered by a particular Scheduler Job.
+ *
+ * Following assertions are made in the response json:
+ *   - Job id
+ *   - Job execution id
+ */ + @Test + public void testrestJobExecResult() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_JOB_EXEC_RESULT_PATH). + setQueryParameter("id", TEST_JOB_EXEC_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + final JsonNode jsonResponse = response.asJson().get(0); + assertTrue("Job id did not match", TEST_JOB_ID1.equals(jsonResponse.path("id").asText())); + assertTrue("Job execution id did not match", TEST_JOB_EXEC_ID1.equals(jsonResponse.path("jobExecId").asText())); + } + }); + } + + /** + *
+ * Rest API - Performs search by flow execution ID.
+ * API returns all jobs under a particular flow execution.
+ *
+ * Following assertions are made in the response json:
+ *   - Job id
+ *   - Flow execution id
+ */ + @Test + public void testrestFlowExecResult() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_FLOW_EXEC_RESULT_PATH). + setQueryParameter("id", TEST_FLOW_EXEC_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + final JsonNode jsonResponse = response.asJson(); + assertTrue("Job id did not match", TEST_JOB_ID1.equals(jsonResponse.findValue("id").asText())); + assertTrue("Flow execution id did not match", + TEST_FLOW_EXEC_ID1.equals(jsonResponse.findValue("flowExecId").asText())); + } + }); + } + + /** + *
+ * Rest API - Perform a generic search or search by filter criteria.
+ * Test verifies if all available flows are returned.
+ *
+ * Following assertions are made in the response json:
+ *   - First job id
+ *   - Second job id
+ */ + @Test + public void testrestSearch() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_SEARCH_PATH). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + List jobList = response.asJson().findValuesAsText("id"); + assertTrue("Job id1 missing in list", jobList.contains(TEST_JOB_ID1)); + assertTrue("Job id2 missing in list", jobList.contains(TEST_JOB_ID2)); + } + }); + } + + /** + *
+ * Rest API - Perform a search with additional params.
+ * Test verifies if specific flow is returned.
+ *
+ * Following assertions are made in the response json:
+ *   - No of jobs returned
+ *   - Job id
+ *   - Username
+ *   - Job type
+ */ + @Test + public void testrestSearchWithUsernameAndJobType() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_SEARCH_PATH). + setQueryParameter("username", TEST_USERNAME). + setQueryParameter("", TEST_JOB_TYPE). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + JsonNode responseJson = response.asJson(); + List jobList = responseJson.findValuesAsText("id"); + assertTrue("More than one row returned", jobList.size() == 1); + assertTrue("Job id missing in response", TEST_JOB_ID1.equals(responseJson.findValue("id").asText())); + assertTrue("Username incorrect", TEST_USERNAME.equals(responseJson.findValue("username").asText())); + assertTrue("Job type incorrect", TEST_JOB_TYPE.equals(responseJson.findValue("jobType").asText())); + } + }); + } + + /** + *
+ * Rest API - Compares two flow executions by flow execution ID.
+ *
+ * Following assertions are made in the response json:
+ *   - Second job ID
+ */ + @Test + public void testrestCompare() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_COMPARE_PATH). + setQueryParameter("flow-exec-id1", TEST_FLOW_EXEC_ID1). + setQueryParameter("flow-exec-id2", TEST_FLOW_EXEC_ID2). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + assertTrue("Job id did not match", TEST_JOB_ID2.equals(response.asJson().findValue("id").asText())); + } + }); + } + + /** + *
+ * Rest API - Provides data for plotting the flow history graph.
+ *
+ * Following assertions are made in the response json:
+ *   - First job execution ID
+ *   - Second job execution ID
+ */ + @Test + public void testrestFlowGraphData() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_FLOW_GRAPH_DATA_PATH). + setQueryParameter("id", TEST_FLOW_DEF_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + List jobList = response.asJson().findValuesAsText("jobexecurl"); + assertTrue("Job exec url1 missing in list", jobList.contains(TEST_JOB_EXEC_ID1)); + assertTrue("Job exec url2 missing in list", jobList.contains(TEST_JOB_EXEC_ID2)); + } + }); + } + + /** + *
+ * Rest API - Provides data for plotting the job history graph.
+ *
+ * Following assertions are made in the response json:
+ *   - First job id
+ *   - Second job id
+ */ + @Test + public void testrestJobGraphData() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_JOB_GRAPH_DATA_PATH). + setQueryParameter("id", TEST_JOB_DEF_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + List jobList = response.asJson().findValuesAsText("stageid"); + assertTrue("Job id 1 missing in list", jobList.contains(TEST_JOB_ID1)); + assertTrue("Job id 2 missing in list", jobList.contains(TEST_JOB_ID2)); + } + }); + } + + /** + *
+ * Rest API - Provides data for plotting the job history graph for time and resources.
+ */ + @Test + public void testrestJobMetricsGraphData() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_JOB_METRICS_GRAPH_DATA_PATH). + setQueryParameter("id", TEST_JOB_DEF_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + List jobList = response.asJson().findValuesAsText("stageid"); + assertTrue("Job id 1 missing in list", jobList.contains(TEST_JOB_ID1)); + assertTrue("Job id 2 missing in list", jobList.contains(TEST_JOB_ID2)); + } + }); + } + + /** + *
+ * Rest API - Provides data for plotting the flow history graph for time and resources.
+ */ + @Test + public void testrestFlowMetricsGraphData() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_FLOW_METRICS_GRAPH_DATA_PATH). + setQueryParameter("id", TEST_FLOW_DEF_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + List jobList = response.asJson().findValuesAsText("jobexecurl"); + assertTrue("Job exec url1 missing in list", jobList.contains(TEST_JOB_EXEC_ID1)); + assertTrue("Job exec url2 missing in list", jobList.contains(TEST_JOB_EXEC_ID2)); + } + }); + } + + @Test + public void testRestUserResourceUsage() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_USER_RESOURCE_USAGE_PATH). + setQueryParameter("startTime", TEST_START_TIME1). + setQueryParameter("endTime", TEST_END_TIME1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator userResources = response.asJson().elements(); + while (userResources.hasNext()) { + JsonNode userResourceUsage = userResources.next(); + if (userResourceUsage.findValue("user").asText().equals("growth")) { + assertTrue("Wrong resourceusage for user growth", + userResourceUsage.findValue("resourceUsed").asDouble() == Utils.MBSecondsToGBHours(100)); + assertTrue("Wrong wastedResources for user growth", + userResourceUsage.findValue("resourceWasted").asDouble() == Utils.MBSecondsToGBHours(30)); + } else if (userResourceUsage.findValue("user").asText().equals("metrics")) { + assertTrue("Wrong resourceusage for user metrics", + userResourceUsage.findValue("resourceUsed").asDouble() == Utils.MBSecondsToGBHours(200)); + assertTrue("Wrong wastedResources for user metrics", + userResourceUsage.findValue("resourceWasted").asDouble() == Utils.MBSecondsToGBHours(40)); + } else { + assertTrue("Unexpected user" + userResourceUsage.findValue("user").asText(), false); + } + } + } + }); + } + + @Test + public void testRestUserResourceUsageBadInput() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_USER_RESOURCE_USAGE_PATH). + setQueryParameter("startTime", TEST_START_TIME1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + assertTrue("Invalid input test failed", response.getStatus() == 400); + } + }); + } + + @Test + public void testRestWorkflowForuser() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_WORKFLOW_SUMMARIES_PATH). + setQueryParameter("username", TEST_USERNAME). 
+ get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator workflowSummaries = response.asJson().elements(); + while (workflowSummaries.hasNext()) { + JsonNode workflowSummary = workflowSummaries.next(); + Iterator workflowObjects = workflowSummary.elements(); + while (workflowObjects.hasNext()) { + JsonNode node = workflowObjects.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + + Iterator jobs = node.findValue("jobsseverity").elements(); + while (jobs.hasNext()) { + JsonNode job = jobs.next(); + Assert.assertEquals(job.findValue("severity").asText(), "None"); + Assert.assertEquals(job.findValue("count").asInt(), 1); + } + } + } + } + }); + } + + @Test + public void testRestJobForUser() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_JOB_SUMMARIES_PATH). + setQueryParameter("username", TEST_USERNAME). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator jobSummaries = response.asJson().elements(); + while (jobSummaries.hasNext()) { + JsonNode jobSummary = jobSummaries.next(); + Iterator jobObjects = jobSummary.elements(); + while (jobObjects.hasNext()) { + JsonNode node = jobObjects.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("jobname").asText(), "overwriter-reminder2"); + Assert.assertEquals(node.findValue("jobtype").asText(), "HadoopJava"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + + Iterator tasks = node.findValue("tasksseverity").elements(); + while (tasks.hasNext()) { + JsonNode job = tasks.next(); + Assert.assertEquals(job.findValue("severity").asText(), "None"); + Assert.assertEquals(job.findValue("count").asInt(), 1); + } + } + } + } + }); + } + + @Test + public void testRestApplicationForUser() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_APPLICATION_SUMMARIES_PATH). + setQueryParameter("username", TEST_USERNAME). 
+ get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator taskSummaries = response.asJson().elements(); + while (taskSummaries.hasNext()) { + JsonNode taskSummary = taskSummaries.next(); + Iterator jobObjects = taskSummary.elements(); + while (jobObjects.hasNext()) { + JsonNode node = jobObjects.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + + Iterator heuristicsSummary = node.findValue("heuristicsummary").elements(); + + HashMap expectedHeuristics = new LinkedHashMap(); + expectedHeuristics.put("Mapper Data Skew", "None"); + expectedHeuristics.put("Mapper GC", "None"); + expectedHeuristics.put("Mapper Time", "None"); + expectedHeuristics.put("Mapper Speed", "None"); + expectedHeuristics.put("Mapper Spill", "None"); + expectedHeuristics.put("Mapper Memory", "None"); + expectedHeuristics.put("Reducer Data Skew", "None"); + expectedHeuristics.put("Reducer Time", "None"); + expectedHeuristics.put("Reducer GC", "None"); + expectedHeuristics.put("Reducer Memory", "None"); + expectedHeuristics.put("Shuffle & Sort", "None"); + + Iterator keyIterator = expectedHeuristics.keySet().iterator(); + while (heuristicsSummary.hasNext() && keyIterator.hasNext()) { + JsonNode job = heuristicsSummary.next(); + String key = keyIterator.next().toString(); + Assert.assertEquals(key, job.findValue("name").asText()); + Assert.assertEquals(expectedHeuristics.get(key), job.findValue("severity").asText()); + } + } + } + } + }); + } + + @Test + public void testRestWorkflowFromId() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_WORKFLOWS_PATH). + setQueryParameter("workflowid", TEST_FLOW_EXEC_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator workflows = response.asJson().elements(); + while (workflows.hasNext()) { + JsonNode node = workflows.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + } + } + }); + } + + @Test + public void testRestWorkflowFromIdIsEmpty() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_WORKFLOWS_PATH). + setQueryParameter("workflowid", "this_is_a_random_id"). 
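+            // For an unknown workflow id the endpoint is expected to return an empty
+            // JSON object, so every field lookup below should yield null.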
+ get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + JsonNode workflows = response.asJson(); + Assert.assertEquals(workflows.get("username"), null); + Assert.assertEquals(workflows.get("starttime"), null); + Assert.assertEquals(workflows.get("finishtime"), null); + Assert.assertEquals(workflows.get("waittime"), null); + Assert.assertEquals(workflows.get("resourceused"), null); + Assert.assertEquals(workflows.get("resourcewasted"), null); + Assert.assertEquals(workflows.get("severity"), null); + Assert.assertEquals(workflows.get("queue"), null); + } + }); + } + + @Test + public void testRestJobFromId() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_JOBS_PATH). + setQueryParameter("jobid", TEST_JOB_EXEC_ID1). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator jobs = response.asJson().elements(); + while (jobs.hasNext()) { + JsonNode node = jobs.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + } + } + }); + } + + @Test + public void testRestJobFromIdIsEmpty() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_JOBS_PATH). + setQueryParameter("jobid", "this_is_a_random_job_id"). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + JsonNode jobs = response.asJson(); + Assert.assertEquals(jobs.get("username"), null); + Assert.assertEquals(jobs.get("starttime"), null); + Assert.assertEquals(jobs.get("finishtime"), null); + Assert.assertEquals(jobs.get("waittime"), null); + Assert.assertEquals(jobs.get("resourceused"), null); + Assert.assertEquals(jobs.get("resourcewasted"), null); + Assert.assertEquals(jobs.get("severity"), null); + Assert.assertEquals(jobs.get("queue"), null); + } + }); + } + + @Test + public void testApplicationFromId() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_APPLICATIONS_PATH). + setQueryParameter("applicationid", TEST_JOB_ID1). 
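+            // Besides the usual summary fields, an application record should also
+            // carry a "trackingurl" pointing at the job history server (asserted below).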
+ get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator applications = response.asJson().elements(); + while (applications.hasNext()) { + JsonNode node = applications.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + Assert.assertEquals(node.findValue("trackingurl").asText(), + "http://elephant.linkedin.com:19888/jobhistory/job/job_1458194917883_1453361"); + } + } + }); + } + + @Test + public void testApplicationFromIdIsEmpty() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_APPLICATIONS_PATH). + setQueryParameter("applicationid", "random_id"). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + JsonNode applications = response.asJson(); + Assert.assertEquals(applications.get("username"), null); + Assert.assertEquals(applications.get("starttime"), null); + Assert.assertEquals(applications.get("finishtime"), null); + Assert.assertEquals(applications.get("waittime"), null); + Assert.assertEquals(applications.get("resourceused"), null); + Assert.assertEquals(applications.get("resourcewasted"), null); + Assert.assertEquals(applications.get("severity"), null); + Assert.assertEquals(applications.get("queue"), null); + } + }); + } + + @Test + public void testRestSearchDataParamUserQueue() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS). + setQueryParameter("username", "growth").setQueryParameter("queue-name", "misc_default"). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator searchNode = response.asJson().elements(); + testRestSearchGeneric(searchNode); + } + }); + } + + @Test + public void testRestSearchDataParamTypeUser() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS). + setQueryParameter("username", "growth").setQueryParameter("job-type", "HadoopJava"). + get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator searchNode = response.asJson().elements(); + testRestSearchGeneric(searchNode); + } + }); + } + + @Test + public void testRestSearchDataParamTimeUser() { + running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() { + public void run() { + populateTestData(); + final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS). + setQueryParameter("username", "growth").setQueryParameter("finishTimeBegin", "1460980723925") + .setQueryParameter("finishTimeEnd", "1460980723928"). 
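+            // finishTimeBegin/finishTimeEnd are epoch milliseconds; this range brackets
+            // the seeded finishtime 1460980723925 asserted in the other tests.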
+            get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS);
+        Iterator searchNode = response.asJson().elements();
+        testRestSearchGeneric(searchNode);
+      }
+    });
+  }
+
+  @Test
+  public void testRestSearchOffsetNegative() {
+    running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() {
+      public void run() {
+        populateTestData();
+        final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS).
+            setQueryParameter("username", "growth").setQueryParameter("offset", "-1").
+            get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS);
+        Iterator searchNode = response.asJson().elements();
+        testRestSearchGeneric(searchNode);
+      }
+    });
+  }
+
+  @Test
+  public void testRestSearchLimitNegative() {
+    running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() {
+      public void run() {
+        populateTestData();
+        final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS).
+            setQueryParameter("username", "growth").setQueryParameter("limit", "-1").
+            get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS);
+        JsonNode searchNode = response.asJson();
+        Assert.assertTrue(searchNode.asText().toString().isEmpty());
+      }
+    });
+  }
+
+  @Test
+  public void testRestSearchOffsetZero() {
+    running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() {
+      public void run() {
+        populateTestData();
+        final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS).
+            setQueryParameter("username", "growth").setQueryParameter("offset", "0").
+            get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS);
+        Iterator searchNode = response.asJson().elements();
+        testRestSearchGeneric(searchNode);
+      }
+    });
+  }
+
+  @Test
+  public void testRestSearchLimitZero() {
+    running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() {
+      public void run() {
+        populateTestData();
+        final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS).
+            setQueryParameter("username", "growth").setQueryParameter("limit", "0").
+            get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS);
+        JsonNode searchNode = response.asJson();
+        Assert.assertTrue(searchNode.asText().toString().isEmpty());
+      }
+    });
+  }
+
+  @Test
+  public void testRestSearchLimitOutOfLimit() {
+    running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() {
+      public void run() {
+        populateTestData();
+        final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS).
+            setQueryParameter("username", "growth").setQueryParameter("limit", "1000").
+            get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS);
+        Iterator searchNode = response.asJson().elements();
+        testRestSearchGeneric(searchNode);
+      }
+    });
+  }
+
+  @Test
+  public void testRestSearchOffsetOutOfLimit() {
+    running(testServer(TEST_SERVER_PORT, fakeApp), new Runnable() {
+      public void run() {
+        populateTestData();
+        final WS.Response response = WS.url(BASE_URL + REST_SEARCH_RESULTS).
+            setQueryParameter("username", "growth").setQueryParameter("offset", "100").
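+            // An offset beyond the number of available results should still return a
+            // search object, but with an empty "summaries" list (asserted below).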
+ get().get(RESPONSE_TIMEOUT, TimeUnit.MILLISECONDS); + Iterator searchNode = response.asJson().elements(); + while (searchNode.hasNext()) { + JsonNode node = searchNode.next(); + JsonNode summaries = node.get("summaries"); + Assert.assertTrue(summaries.asText().toString().isEmpty()); + } + } + }); + } + + private void testRestSearchGeneric(Iterator searchNode) { + while (searchNode.hasNext()) { + JsonNode search = searchNode.next(); + Assert.assertEquals(search.findValue("start").asInt(), 0); + Assert.assertEquals(search.findValue("end").asInt(), 1); + Assert.assertEquals(search.findValue("total").asInt(), 1); + Assert.assertTrue(!search.findValue("summaries").isNull()); + Iterator iterator = search.findValue("summaries").elements(); + + while (iterator.hasNext()) { + JsonNode node = iterator.next(); + Assert.assertEquals(node.findValue("username").asText(), "growth"); + Assert.assertEquals(node.findValue("starttime").asLong(), 1460980616502L); + Assert.assertEquals(node.findValue("finishtime").asLong(), 1460980723925L); + Assert.assertEquals(node.findValue("waittime").asLong(), 20); + Assert.assertEquals(node.findValue("resourceused").asLong(), 100); + Assert.assertEquals(node.findValue("resourcewasted").asLong(), 30); + Assert.assertEquals(node.findValue("severity").asText(), "None"); + Assert.assertEquals(node.findValue("queue").asText(), "misc_default"); + + Iterator heuristicsSummary = node.findValue("heuristicsummary").elements(); + HashMap expectedHeuristics = new LinkedHashMap(); + expectedHeuristics.put("Mapper Data Skew", "None"); + expectedHeuristics.put("Mapper GC", "None"); + expectedHeuristics.put("Mapper Time", "None"); + expectedHeuristics.put("Mapper Speed", "None"); + expectedHeuristics.put("Mapper Spill", "None"); + expectedHeuristics.put("Mapper Memory", "None"); + expectedHeuristics.put("Reducer Data Skew", "None"); + expectedHeuristics.put("Reducer Time", "None"); + expectedHeuristics.put("Reducer GC", "None"); + expectedHeuristics.put("Reducer Memory", "None"); + expectedHeuristics.put("Shuffle & Sort", "None"); + + Iterator keyIterator = expectedHeuristics.keySet().iterator(); + while (heuristicsSummary.hasNext() && keyIterator.hasNext()) { + JsonNode job = heuristicsSummary.next(); + String key = keyIterator.next().toString(); + Assert.assertEquals(key, job.findValue("name").asText()); + Assert.assertEquals(expectedHeuristics.get(key), job.findValue("severity").asText()); + } + } + } + } + + private void populateTestData() { + try { + initDB(); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/web/app/adapters/application.js b/web/app/adapters/application.js new file mode 100644 index 000000000..a01c937a7 --- /dev/null +++ b/web/app/adapters/application.js @@ -0,0 +1,28 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+import DS from 'ember-data';
+import Ember from 'ember';
+
+export default DS.RESTAdapter.extend({
+  namespace: 'rest',
+  pathForType: function (type) {
+    return Ember.String.pluralize(type);
+  }
+});
diff --git a/web/app/app.js b/web/app/app.js
new file mode 100644
index 000000000..efdd02d11
--- /dev/null
+++ b/web/app/app.js
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+import Resolver from './resolver';
+import loadInitializers from 'ember-load-initializers';
+import config from './config/environment';
+
+let App;
+
+Ember.MODEL_FACTORY_INJECTIONS = true;
+
+App = Ember.Application.extend({
+  modulePrefix: config.modulePrefix,
+  podModulePrefix: config.podModulePrefix,
+  Resolver
+});
+
+loadInitializers(App, config.modulePrefix);
+
+export default App;
diff --git a/web/app/components/heuristic-details-list.js b/web/app/components/heuristic-details-list.js
new file mode 100644
index 000000000..e7324eaea
--- /dev/null
+++ b/web/app/components/heuristic-details-list.js
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+});
diff --git a/web/app/components/loading-panel.js b/web/app/components/loading-panel.js
new file mode 100644
index 000000000..e7324eaea
--- /dev/null
+++ b/web/app/components/loading-panel.js
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+});
diff --git a/web/app/components/map-reduce-exception.js b/web/app/components/map-reduce-exception.js
new file mode 100644
index 000000000..e7324eaea
--- /dev/null
+++ b/web/app/components/map-reduce-exception.js
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Component.extend({ +}); diff --git a/web/app/components/paging-panel.js b/web/app/components/paging-panel.js new file mode 100644 index 000000000..592a81883 --- /dev/null +++ b/web/app/components/paging-panel.js @@ -0,0 +1,51 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Component.extend({ + + shouldShowPrevious: false, + shouldShowNext: false, + nextPageNumber: 1, + previousPageNumber: 1, + + didReceiveAttrs() { + this._super(...arguments); + let currentPage = this.get('paging.currentPage'); + + /** + * if currentPage is not first page, show previous button and assign a page number to previous button + */ + if (currentPage > 1) { + this.set('shouldShowPrevious', true); + this.set('previousPageNumber', currentPage - 1); + } else { + this.set('shouldShowPrevious', false); + this.set('previousPageNumber', 1); + } + + /** + * if currentPage is not the last page, show next button and assign a page number to next button + */ + if (currentPage != this.get("paging.numberOfPages")) { + this.set("shouldShowNext", true); + this.set("nextPageNumber", currentPage + 1); + } else { + this.set("shouldShowNext", false); + } + } +}); diff --git a/web/app/components/search-panel.js b/web/app/components/search-panel.js new file mode 100644 index 000000000..4d8684712 --- /dev/null +++ b/web/app/components/search-panel.js @@ -0,0 +1,74 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+import Ember from 'ember';
+
+const APPLICATION_TYPES = {
+  workflow: "Workflow", job: "Job", application: "Application"
+};
+
+export default Ember.Component.extend({
+
+  searchQuery: null,
+  selectedType: APPLICATION_TYPES.workflow,
+  applicationTypes: [APPLICATION_TYPES.workflow, APPLICATION_TYPES.job, APPLICATION_TYPES.application],
+  selectedTypeToolTip: "Workflow execution id/url",
+  selectedTypePlaceHolder: "Workflow execution id/url",
+
+  notifications: Ember.inject.service('notification-messages'),
+
+  actions: {
+    selected(selectionName) {
+      if (selectionName === "Advanced") {
+        // go to advanced search when Advanced is clicked
+        this.get('router').transitionTo("search");
+      } else {
+        this.set("selectedType", selectionName);
+        if (selectionName === APPLICATION_TYPES.workflow) {
+          this.set("selectedTypeToolTip", "Workflow execution url/id");
+          this.set("selectedTypePlaceHolder", "Workflow execution url/id");
+        } else if (selectionName === APPLICATION_TYPES.job) {
+          this.set("selectedTypeToolTip", "Job execution url/id");
+          this.set("selectedTypePlaceHolder", "Execution id/url of scheduler job (Pig, Hive)");
+        } else if (selectionName === APPLICATION_TYPES.application) {
+          this.set("selectedTypeToolTip", "Yarn application");
+          this.set("selectedTypePlaceHolder", "job_23423432_343 / application_23423432_343");
+        }
+      }
+    },
+
+    search() {
+      let searchText = this.get("searchQuery");
+      let type = this.get("selectedType");
+
+      if (searchText === "" || searchText == null) {
+        this.get('notifications').error('Search field cannot be empty!', {
+          autoClear: true
+        });
+        return;
+      }
+
+      if (type === APPLICATION_TYPES.workflow) {
+        this.get('router').transitionTo('workflow', {queryParams: {workflowid: searchText}});
+      } else if (type === APPLICATION_TYPES.job) {
+        this.get('router').transitionTo('job', {queryParams: {jobid: searchText}});
+      } else if (type === APPLICATION_TYPES.application) {
+        this.get('router').transitionTo('app', {queryParams: {applicationid: searchText}});
+      }
+    }
+  }
+});
+
diff --git a/web/app/components/single-heuristic-detail.js b/web/app/components/single-heuristic-detail.js
new file mode 100644
index 000000000..e7324eaea
--- /dev/null
+++ b/web/app/components/single-heuristic-detail.js
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+});
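Note: the `router` property used in the search-panel actions above is not a standard
component member; it is provided by the component-router-injector initializer added
later in this patch, which runs:

    application.inject('component', 'router', 'router:main');

Without that injection, this.get('router') in the component's actions would be undefined.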
diff --git a/web/app/components/single-tab.js b/web/app/components/single-tab.js
new file mode 100644
index 000000000..62de4aa13
--- /dev/null
+++ b/web/app/components/single-tab.js
@@ -0,0 +1,21 @@
+/**
+ *
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+});
diff --git a/web/app/components/user-tabs.js b/web/app/components/user-tabs.js
new file mode 100644
index 000000000..658bc31bd
--- /dev/null
+++ b/web/app/components/user-tabs.js
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  newUser: null, // this is bound to the text box for adding user
+  showInputBox: false,
+  actions: {
+
+    /**
+     * sets showInputBox to true to show the input box
+     */
+    showInput() {
+      this.set("showInputBox", true);
+    },
+
+    /**
+     * sets showInputBox to false to hide the input box
+     */
+    resetInput() {
+      this.set("showInputBox", false);
+    }
+  }
+});
diff --git a/web/app/components/user-tags.js b/web/app/components/user-tags.js
new file mode 100644
index 000000000..bff77b3b6
--- /dev/null
+++ b/web/app/components/user-tags.js
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  showInputBox: false,
+  actions: {
+
+    /**
+     * sets showInputBox to true to show the input box
+     */
+    showInput() {
+      this.set("showInputBox", true);
+    },
+
+    /**
+     * sets showInputBox to false to hide the input box
+     */
+    resetInput() {
+      this.set("showInputBox", false);
+    }
+  }
+});
diff --git a/web/app/controllers/app.js b/web/app/controllers/app.js
new file mode 100644
index 000000000..159bc831e
--- /dev/null
+++ b/web/app/controllers/app.js
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License.
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Controller.extend({ + queryParams: ['applicationid'], + applicationid: null +}); diff --git a/web/app/controllers/dashboard.js b/web/app/controllers/dashboard.js new file mode 100644 index 000000000..72af7be8b --- /dev/null +++ b/web/app/controllers/dashboard.js @@ -0,0 +1,58 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Controller.extend({ + showInputBox: false, + notifications: Ember.inject.service('notification-messages'), + actions: { + + /** + * This action adds a new tab and clicks on it once the tab is added and rendered + * @params user The user to be added as a tab + */ + addTab(user) { + + if(user===null || user==="") { + this.get('notifications').error('The user cannot be empty', { + autoClear: true + }); + return; + } + this.users.addToUsername(user); + this.users.setActiveUser(user); + this.set('model.usernames',this.users.getUsernames()); + Ember.run.scheduleOnce('afterRender', this, function() { + Ember.$("#"+user).trigger("click"); + }); + }, + + /** + * This action deletes the tab from the list and clicks on the `all` tab + * @params tabname the tab to delete + */ + deleteTab(tabname) { + this.users.deleteUsername(tabname); + this.set('model.usernames',this.users.getUsernames()); + if(this.users.getActiveUser()===tabname) { + Ember.run.scheduleOnce('afterRender', this, function () { + Ember.$("#all a").trigger("click"); + }); + } + } + } +}); diff --git a/web/app/controllers/dashboard/app.js b/web/app/controllers/dashboard/app.js new file mode 100644 index 000000000..8b2016c9a --- /dev/null +++ b/web/app/controllers/dashboard/app.js @@ -0,0 +1,50 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import users from 'dr-elephant/models/users'; +import Dashboard from 'dr-elephant/controllers/dashboard'; + +export default Dashboard.extend({ + users: new users(), + loading: false, + + /** + * This function returns the list of usernames currently stored + * @returns The list of usernames currently stored + */ + usernames() { + return this.users.getUsernames(); + }, + + actions: { + + /** + * changes the tab to the clicked user + * @params The name of the user tab + */ + changeTab(tabname) { + this.set("loading", true); + this.users.setActiveUser(tabname); + var _this = this; + _this.store.unloadAll(); + var newApplications = this.store.query('application-summary', {username: tabname}); + newApplications.then(function () { + _this.set("model.applications", newApplications); + _this.set("loading", false); + }); + } + } +}); diff --git a/web/app/controllers/dashboard/job.js b/web/app/controllers/dashboard/job.js new file mode 100644 index 000000000..14d7532e7 --- /dev/null +++ b/web/app/controllers/dashboard/job.js @@ -0,0 +1,49 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import users from 'dr-elephant/models/users'; +import Dashboard from 'dr-elephant/controllers/dashboard'; + +export default Dashboard.extend({ + users: new users(), + loading: false, + + /** + * This function returns the list of usernames currently stored + * @returns The list of usernames currently stored + */ + usernames: function () { + return this.users.getUsernames(); + }, + actions: { + + /** + * changes the tab to the clicked user + * @params The name of the user tab + */ + changeTab(tabname) { + this.set("loading", true); + this.users.setActiveUser(tabname); + var _this = this; + _this.store.unloadAll(); + var newJobs = this.store.query('job-summary', {username: tabname}); + newJobs.then(function () { + _this.set("model.jobs", newJobs); + _this.set("loading", false); + }); + } + } +}); diff --git a/web/app/controllers/dashboard/workflow.js b/web/app/controllers/dashboard/workflow.js new file mode 100644 index 000000000..d551dd1ab --- /dev/null +++ b/web/app/controllers/dashboard/workflow.js @@ -0,0 +1,51 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import Ember from 'ember'; +import users from 'dr-elephant/models/users'; +import Dashboard from 'dr-elephant/controllers/dashboard'; + +export default Dashboard.extend({ + users: new users(), + loading: false, + + /** + * This function returns the list of usernames currently stored + * @returns The list of usernames currently stored + */ + usernames: function () { + return this.users.getUsernames(); + }, + actions: { + + /** + * changes the tab to the clicked user + * @params The name of the user tab + */ + changeTab(tabname) { + this.set("loading", true); + this.users.setActiveUser(tabname); + var _this = this; + _this.store.unloadAll(); + var newworkflows = this.store.query('workflow-summary', {username: tabname}); + newworkflows.then(function () { + _this.set("model.workflows", newworkflows); + _this.set("loading", false); + }); + } + + } +}); diff --git a/web/app/controllers/job.js b/web/app/controllers/job.js new file mode 100644 index 000000000..cd716dd44 --- /dev/null +++ b/web/app/controllers/job.js @@ -0,0 +1,22 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Controller.extend({ + queryParams: ['jobid'], + jobid: null +}); diff --git a/web/app/controllers/not-found.js b/web/app/controllers/not-found.js new file mode 100644 index 000000000..357c9a771 --- /dev/null +++ b/web/app/controllers/not-found.js @@ -0,0 +1,6 @@ +import Ember from 'ember'; + +export default Ember.Controller.extend({ + queryParams: ['previous'], + previous: null, +}); diff --git a/web/app/controllers/search.js b/web/app/controllers/search.js new file mode 100644 index 000000000..2ef67f9ab --- /dev/null +++ b/web/app/controllers/search.js @@ -0,0 +1,195 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+import Ember from 'ember';
+import moment from 'moment';
+
+export default Ember.Controller.extend({
+  notifications: Ember.inject.service('notification-messages'),
+  loading: false,
+
+  queryParams: ['username', 'queueName', 'jobType', 'severity', 'analysis', 'finishTimeBegin', 'finishTimeEnd',
+    'offset', 'limit'],
+
+  /** query params **/
+  username: null,
+  queueName: null,
+  jobType: null,
+  severity: null,
+  analysis: null,
+  finishTimeBegin: null,
+  finishTimeEnd: null,
+  offset: null,
+  limit: null,
+
+  /** values for parameters **/
+  usernameValue: null,
+  queueNameValue: null,
+  severityValue: null,
+  analysisValue: null,
+  jobTypeValue: null,
+
+  /** values bound to form inputs **/
+  finishTimeBeginValue: null,
+  finishTimeEndValue: null,
+  isJobTypeChecked: false,
+  isSeverityChecked: false,
+  isFinishDateChecked: false,
+
+  /** pagination variables **/
+  paging: null,
+  shouldShowPaging: false,
+  entriesPerPage: 20,
+  maxPagesToShow: 10,
+  currentPage: 1,
+
+  /**
+   * Watcher for model. We need this watcher for paging and notifications
+   */
+  watchModel: Ember.observer('model.summaries', function () {
+    var totalEntries = this.get("model.summaries.total");
+    var startOfEntries = this.get("model.summaries.start");
+
+    var numberOfPages = Math.ceil(totalEntries / this.get("entriesPerPage"));
+    var startPage = Math.ceil((startOfEntries + 1) / this.get("entriesPerPage"));
+    var currentPage = startPage;
+
+    var pages = [];
+    for (var i = startPage; i <= Math.min(numberOfPages, startPage + this.get("maxPagesToShow")); i++) {
+      var singleObject = {};
+      singleObject['number'] = (i);
+      pages.push(singleObject);
+    }
+
+    /** show paging when number of pages are more than one **/
+    if (numberOfPages > 1) {
+      this.set("shouldShowPaging", true);
+    } else {
+      this.set("shouldShowPaging", false);
+    }
+
+    /** set variables for paging **/
+    this.set("currentPage", currentPage);
+    this.set("paging", {pages: pages, currentPage: currentPage, numberOfPages: numberOfPages});
+
+    /** show notification if no results **/
+    if (this.get("model.summaries.total") == 0) {
+      this.get('notifications').error('No applications found for given query!', {
+        autoClear: true
+      });
+    }
+  }),
+
+  /**
+   * Watches the isJobTypeChecked boolean flag. This flag is true when the checkbox for jobtype is ticked.
+   * We need to tie jobType to the value of the jobtype selection input whenever the checkbox is checked.
+   */
+  watchJobCheck: Ember.observer('isJobTypeChecked', function () {
+    if (!this.get("isJobTypeChecked")) {
+      this.set("jobTypeValue", null);
+    } else {
+      this.set("jobTypeValue",
+          this.get("model.searchOptions.jobcategory").get('firstObject').jobtypes.get('firstObject').name);
+    }
+  }),
+
+  /**
+   * Watches the isFinishDateChecked boolean flag. This flag is true when the checkbox for FinishDate is ticked.
+   * We need to reset finishTimeBegin and finishTimeEnd whenever the checkbox is toggled.
+   */
+  watchFinishTimeCheck: Ember.observer('isFinishDateChecked', function () {
+    this.set("finishTimeBeginValue", null);
+    this.set("finishTimeEndValue", null);
+  }),
+
+  /**
+   * Watches the isSeverityChecked boolean flag. This flag is true when the checkbox for Severity is ticked.
+   * We need to tie severity and analysis to the values of the severity and analysis selection inputs whenever the checkbox is checked.
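+   * When the checkbox is unchecked, both values are reset to null so that neither
+   * filter is carried into the next search query.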
+ */ + watchSeverityCheck: Ember.observer('isSeverityChecked', function () { + if (!this.get("isSeverityChecked")) { + this.set("analysisValue", null); + this.set("severityValue", null); + } else { + this.set("severityValue", this.get("model.searchOptions.severities").get('firstObject').value); + } + }), + + /** + * Actions + **/ + actions: { + + /** + * Actions for select inputs + */ + + selectHeuristic(heuristic) { + this.set("analysisValue", heuristic); + }, + selectSeverity(severity) { + this.set("severityValue", severity); + }, + selectJobType(jobType) { + this.set("jobTypeValue", jobType); + }, + + /** + * loads the page + */ + loadPage (page) { + this.set("loading", true); + var _this = this; + this.set("offset", this.get("entriesPerPage") * (page - 1)); + this.set("limit", this.get("entriesPerPage")); + this.set("finishTimeBegin", this.get("finishTimeBeginValue")); + this.set("finishTimeEnd", this.get("finishTimeEndValue")); + this.set("severity", this.get("severityValue")); + this.set("jobType", this.get("jobTypeValue")); + this.set("username", this.get("usernameValue")); + this.set("queueName", this.get("queueNameValue")); + this.set("analysis", this.get("analysisValue")); + var newsummaries = this.store.queryRecord('search-result', { + 'username': this.username, + 'queue-name': this.queueName, + 'job-type': this.jobType, + 'severity': this.severity, + 'analysis': this.analysis, + 'finished-time-begin': moment(this.get('finishTimeBegin')).valueOf(), + 'finished-time-end': moment(this.get('finishTimeEnd')).valueOf(), + 'type': this.type, + 'offset': this.offset, + 'limit': this.limit + }); + + /** + * update model after fetching the searched data + */ + newsummaries.then(() => { + _this.set("model.summaries", newsummaries); + _this.set("loading", false); + }); + }, + + /** + * loads the first page + */ + search: function () { + this.send('loadPage', 1); + } + } +}); diff --git a/web/app/controllers/user-details.js b/web/app/controllers/user-details.js new file mode 100644 index 000000000..ebfea1fc9 --- /dev/null +++ b/web/app/controllers/user-details.js @@ -0,0 +1,196 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import moment from 'moment'; + +export default Ember.Controller.extend({ + usernameSet: null, + usernamesArray: null, + queryParams: ['usernames','finishTimeBegin', 'finishTimeEnd', 'sortKey', 'increasing'], + increasing: false, + usernames: null, + finishTimeBeginValue: null, + finishTimeEndValue: null, + finishTimeBegin: null, + finishTimeEnd: null, + sortKey: "severity", + showInputBox: false, + newUser: null, + loading: false, + + showUserDetails: false, + + /** paging variables **/ + paging: null, + shouldShowPaging: true, + entriesPerPage: 20, + maxPagesToShow: 10, + currentPage: 1, + + + /** + * Watcher for model. 
We need this watcher for paging and notifications
+   */
+  watchModel: Ember.observer('model', function () {
+    var totalEntries = this.get("model.total");
+    if (totalEntries > 0) {
+      if (this.get("finishTimeBegin") == null && this.get("finishTimeEnd") == null) {
+        this.get('notifications').info('Showing results for the last week!', {
+          autoClear: true
+        });
+      }
+      this.set("showUserDetails", true);
+    } else {
+      this.set("showUserDetails", false);
+    }
+    var startOfEntries = this.get("model.start");
+
+    let entriesPerPage = this.get("entriesPerPage");
+    var numberOfPages = Math.ceil(totalEntries / entriesPerPage);
+    var startPage = Math.ceil((startOfEntries + 1) / entriesPerPage);
+    var currentPage = startPage;
+
+    var pages = [];
+    for (var i = startPage; i <= Math.min(numberOfPages, startPage + this.get("maxPagesToShow")); i++) {
+      var singleObject = {};
+      singleObject['number'] = (i);
+      pages.push(singleObject);
+    }
+
+    /** show paging when number of pages are more than one **/
+    if (numberOfPages > 1) {
+      this.set("shouldShowPaging", true);
+    } else {
+      this.set("shouldShowPaging", false);
+    }
+
+    /** set variables for paging **/
+    this.set("currentPage", currentPage);
+    this.set("paging", {pages: pages, currentPage: currentPage, numberOfPages: numberOfPages});
+
+    /** show notification if no results **/
+    if (this.get("model.total") == 0) {
+      if (this.get("finishTimeBegin") == null && this.get("finishTimeEnd") == null) {
+        this.get('notifications').error('No applications found for the last week. Try a different date range!', {
+          autoClear: true
+        });
+      } else {
+        this.get('notifications').error('No applications found!', {
+          autoClear: true
+        });
+      }
+    }
+  }),
+
+  actions: {
+
+    /**
+     * This action adds a new tab and clicks on it once the tab is added and rendered
+     * @params user The user to be added as a tab
+     */
+    addTab(user) {
+      if (user === null || user === "") {
+        this.get('notifications').error('The user cannot be empty', {
+          autoClear: true
+        });
+        return;
+      }
+      this.set("usernamesArray", Array.from(this.get("usernameSet").add(user)));
+      this.set("usernames", this.get("usernamesArray").join(","));
+      this.send('loadPage', 1);
+    },
+
+    /**
+     * This action deletes the user tab from the list and reloads the results
+     * @params tabname the tab to delete
+     */
+    deleteTab(user) {
+      this.get("usernameSet").delete(user);
+      this.set("usernamesArray", Array.from(this.get("usernameSet")));
+      this.set("usernames", this.get("usernamesArray").join(","));
+      this.send('loadPage', 1);
+    },
+
+    /**
+     * This action searches the results based on given parameters
+     */
+    search() {
+      this.send('loadPage', 1);
+    },
+
+    /**
+     * Selects the sort key.
+     * @param sortBy
+     */
+    selectSortKey(sortBy) {
+      this.set("sortKey", sortBy);
+    },
+
+    /**
+     * This action loads the given page.
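+     * The offset is derived as entriesPerPage * (page - 1), and any date-picker values
+     * are converted to epoch milliseconds with moment() before querying 'user-detail'.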
+ * @param page number + */ + loadPage(page) { + var _this = this; + this.set("loading", true); + this.set("offset", this.get("entriesPerPage") * (page - 1)); + this.set("limit", this.get("entriesPerPage")); + if(this.get("finishTimeBeginValue")!=null) { + this.set("finishTimeBegin", moment(this.get("finishTimeBeginValue")).valueOf()); + } else { + this.set("finishTimeBegin", null); + } + + if(this.get("finishTimeEndValue")!=null) { + this.set("finishTimeEnd", moment(this.get("finishTimeEndValue")).valueOf()); + } else { + this.set("finishTimeEnd", null); + } + this.set("jobType", this.get("jobTypeValue")); + this.set("username", this.get("usernameValue")); + + var newsummaries = this.store.queryRecord('user-detail', { + 'usernames': this.usernames, + 'finished-time-begin': this.get('finishTimeBegin'), + 'finished-time-end': this.get('finishTimeEnd'), + 'offset': this.offset, + 'limit': this.limit, + 'sortKey': this.sortKey, + 'increasing': this.increasing + }); + + + /** + * update model after fetching the searched data + */ + newsummaries.then(() => { + _this.set("model", newsummaries); + _this.set("loading", false); + }).catch(err => { + _this.set("loading", false); + this.set("showUserDetails", false); + this.set("shouldShowPaging", false); + }); + } + } +}); diff --git a/web/app/controllers/workflow-exceptions.js b/web/app/controllers/workflow-exceptions.js new file mode 100644 index 000000000..1fd906bd3 --- /dev/null +++ b/web/app/controllers/workflow-exceptions.js @@ -0,0 +1,100 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+  queryParams: ['workflowId', 'scheduler'],
+  workflowId: null,
+  workflowIdValue: null,
+  loading: false,
+  showExceptions: false,
+  scheduler: null,
+
+  showSchedulerDropdown: true,
+
+  watchModel: Ember.observer('model.exceptionStatus', function () {
+    if (this.get('model.exceptionStatus.exceptionenabled') == "false") {
+      this.transitionToRoute('work-in-progress');
+      return;
+    }
+    if (this.get("model.exceptionStatus.schedulers").length == 1) {
+      this.set('showSchedulerDropdown', false);
+      this.set('scheduler', this.get('model.exceptionStatus.schedulers')[0].name);
+    }
+  }),
+
+  actions: {
+
+    /**
+     * select the scheduler from dropdown
+     */
+    selectScheduler(schedulerName) {
+      this.set("scheduler", schedulerName);
+    },
+
+    /**
+     * Search the exceptions
+     */
+    search() {
+      this.set("showExceptions", false);
+      this.set("loading", true);
+      this.set("workflowId", this.get("workflowIdValue"));
+      var _this = this;
+
+      var exceptionResults = this.store.query('workflow-exception', {
+        'flow-exec-url': this.get("workflowId"), 'scheduler': this.get('scheduler')
+      });
+
+      /**
+       * update model after fetching the searched data
+       */
+      exceptionResults.then(() => {
+        if (Ember.isEmpty(exceptionResults)) {
+          _this.set("showExceptions", false);
+          this.get('notifications').info('All applications succeeded!', {
+            autoClear: true
+          });
+          _this.set("loading", false);
+          return;
+        }
+        _this.set("model.exceptions", exceptionResults);
+        _this.set("loading", false);
+        _this.set("showExceptions", true);
+      }).catch(err => {
+        if (err.errors[0].status == 404) {
+          _this.set("loading", false);
+          this.get('notifications').error('Workflow not found for given url', {
+            autoClear: true
+          });
+          this.set("showExceptions", false);
+        } else if (err.errors[0].status == 503) {
+          _this.set("loading", false);
+          _this.transitionToRoute('work-in-progress');
+        } else {
+          _this.set("loading", false);
+          this.set("showExceptions", false);
+          this.get('notifications').error('Unexpected error occurred finding the exceptions', {
+            autoClear: true
+          });
+        }
+      });
+    }
+  }
+});
diff --git a/web/app/controllers/workflow.js b/web/app/controllers/workflow.js
new file mode 100644
index 000000000..5621151fc
--- /dev/null
+++ b/web/app/controllers/workflow.js
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+  queryParams: ['workflowid'],
+  workflowid: null,
+});
diff --git a/web/app/helpers/eq.js b/web/app/helpers/eq.js
new file mode 100644
index 000000000..79c53d7ef
--- /dev/null
+++ b/web/app/helpers/eq.js
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License.
You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+/**
+ * helper takes two parameters and returns true if both are equal, else false
+ * @param params The parameters for the helper
+ * @returns {boolean}
+ */
+export function eq(params) {
+  return params[0] === params[1];
+}
+
+export default Ember.Helper.helper(eq);
diff --git a/web/app/helpers/get-bootstrap-severity-code.js b/web/app/helpers/get-bootstrap-severity-code.js
new file mode 100644
index 000000000..b38d6fe79
--- /dev/null
+++ b/web/app/helpers/get-bootstrap-severity-code.js
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+/** Map to convert severity to bootstrap class **/
+const SEVERITY_TO_BOOTSTRAP_MAP = {
+  critical: "danger",
+  severe: "severe",
+  moderate: "warning",
+  low: "success",
+  none: "success"
+};
+
+/**
+ * This helper takes the severity as the parameter value and returns the corresponding bootstrap code
+ * @param params The parameters
+ * @returns one of {"danger","severe","warning","success"}
+ */
+export function getBootstrapSeverityCode(params) {
+  let [severity] = params;
+  if (severity == null) {
+    return SEVERITY_TO_BOOTSTRAP_MAP.none;
+  }
+  return SEVERITY_TO_BOOTSTRAP_MAP[severity.toLowerCase()];
+}
+
+export default Ember.Helper.helper(getBootstrapSeverityCode);
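A quick usage sketch of the two helpers above (values hypothetical; the exported helper
functions receive their positional params as an array):

    import { eq } from 'dr-elephant/helpers/eq';
    import { getBootstrapSeverityCode } from 'dr-elephant/helpers/get-bootstrap-severity-code';

    eq(["None", "None"]);                    // true
    getBootstrapSeverityCode(["Critical"]);  // "danger"
    getBootstrapSeverityCode([null]);        // "success" (null is treated as none)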
diff --git a/web/app/helpers/get-color-for-severity.js b/web/app/helpers/get-color-for-severity.js
new file mode 100644
index 000000000..d8995b60c
--- /dev/null
+++ b/web/app/helpers/get-color-for-severity.js
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+/** Map to convert severity to color **/
+const SEVERITY_TO_COLOR_CODE_MAP = {
+  critical: "#D9534F",
+  severe: "#E4804E",
+  moderate: "#F0AD4E",
+  low: "#5CB85C",
+  none: "#5CB85C"
+};
+
+/**
+ * Returns the color based on the severity
+ * @param params The severity value
+ * @returns The color based on the severity
+ */
+export function getColorForSeverity(params) {
+  let [severity] = params;
+  if (severity == null) {
+    return SEVERITY_TO_COLOR_CODE_MAP.none;
+  }
+  return SEVERITY_TO_COLOR_CODE_MAP[severity.toLowerCase()];
+}
+
+export default Ember.Helper.helper(getColorForSeverity);
diff --git a/web/app/helpers/get-date.js b/web/app/helpers/get-date.js
new file mode 100644
index 000000000..4fd5dc2b1
--- /dev/null
+++ b/web/app/helpers/get-date.js
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+/**
+ * Returns the date from milliseconds
+ * @param params The date to convert
+ * @returns The converted date
+ */
+export function getDate(params) {
+  let [date] = params;
+  return new Date(date);
+}
+
+export default Ember.Helper.helper(getDate);
diff --git a/web/app/helpers/get-duration-breakdown.js b/web/app/helpers/get-duration-breakdown.js
new file mode 100644
index 000000000..331961661
--- /dev/null
+++ b/web/app/helpers/get-duration-breakdown.js
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+const TIME = {
+  milliseconds_in_seconds: 1000,
+  seconds_in_minutes: 60,
+  minutes_in_hours: 60,
+  hours_in_days: 24
+};
+
+/**
+ * Breaks down milliseconds to HH:MM:SS
+ * @param params time in milliseconds
+ * @returns {*}
+ */
+export function getDurationBreakdown(params) {
+  let [duration] = params;
+  var seconds = parseInt((duration / TIME.milliseconds_in_seconds) % TIME.seconds_in_minutes);
+  var minutes = parseInt((duration / (TIME.milliseconds_in_seconds * TIME.seconds_in_minutes)) % TIME.minutes_in_hours);
+  var hours = parseInt(duration / (TIME.milliseconds_in_seconds * TIME.seconds_in_minutes * TIME.minutes_in_hours));
+
+  if (duration < TIME.milliseconds_in_seconds) {
+    return "0 sec";
+  }
+  return hours + " hr " + minutes + " min " + seconds + " sec";
+}
+
+export default Ember.Helper.helper(getDurationBreakdown);
diff --git a/web/app/helpers/gt.js b/web/app/helpers/gt.js
new file mode 100644
--- /dev/null
+++ b/web/app/helpers/gt.js
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+/**
+ * Returns true if the first parameter is greater than the second
+ * @param params The two values to compare
+ * @returns {boolean}
+ */
+export function gt(params) {
+  let [first, second] = params;
+  return first > second;
+}
+
+export default Ember.Helper.helper(gt);
diff --git a/web/app/helpers/not-empty.js b/web/app/helpers/not-empty.js
new file mode 100644
index 000000000..26cfaa476
--- /dev/null
+++ b/web/app/helpers/not-empty.js
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
diff --git a/web/app/helpers/not-empty.js b/web/app/helpers/not-empty.js new file mode 100644 index 000000000..26cfaa476 --- /dev/null +++ b/web/app/helpers/not-empty.js @@ -0,0 +1,32 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +/** + * Checks that a given string is neither empty nor null + * @param params + * @returns {boolean} + */ +export function notEmpty(params) { + let [id] = params; + if (id == "" || id == null) { + return false; + } + return true; +} + +export default Ember.Helper.helper(notEmpty); diff --git a/web/app/helpers/url-encode.js b/web/app/helpers/url-encode.js new file mode 100644 index 000000000..97f3eb8d4 --- /dev/null +++ b/web/app/helpers/url-encode.js @@ -0,0 +1,29 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +/** + * URL-encodes the given string + * @param params The URL to encode + * @returns The encoded URL + */ +export function urlEncode(params) { + let [uri] = params; + return encodeURIComponent(uri); +} + +export default Ember.Helper.helper(urlEncode); diff --git a/web/app/index.html b/web/app/index.html new file mode 100644 index 000000000..728f2e4e0 --- /dev/null +++ b/web/app/index.html @@ -0,0 +1,41 @@ +<!-- + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. +--> +<!DOCTYPE html> +<html> + <head> + <meta charset="utf-8"> + <title>DrElephant</title> + <meta name="description" content=""> + <meta name="viewport" content="width=device-width, initial-scale=1"> + + {{content-for "head"}} + + <link rel="stylesheet" href="assets/vendor.css"> + <link rel="stylesheet" href="assets/dr-elephant.css"> + + {{content-for "head-footer"}} + </head> + <body> + {{content-for "body"}} + + <script src="assets/vendor.js"></script> + <script src="assets/dr-elephant.js"></script> + + {{content-for "body-footer"}} + </body> +</html> diff --git a/web/app/initializers/component-router-injector.js b/web/app/initializers/component-router-injector.js new file mode 100644 index 000000000..eefdae122 --- /dev/null +++ b/web/app/initializers/component-router-injector.js @@ -0,0 +1,25 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License.
+ */ + +export function initialize(application) { + // Injects all Ember components with a router object: + application.inject('component', 'router', 'router:main'); +} + +export default { + name: 'component-router-injector', + initialize: initialize +}; diff --git a/web/app/initializers/notification-injector.js b/web/app/initializers/notification-injector.js new file mode 100644 index 000000000..03e522706 --- /dev/null +++ b/web/app/initializers/notification-injector.js @@ -0,0 +1,8 @@ +export function initialize(application) { + application.inject('controller', 'notifications', 'service:notification-messages'); +} + +export default { + name: 'inject-notifications', + initialize: initialize +}; diff --git a/web/app/models/application-summary.js b/web/app/models/application-summary.js new file mode 100644 index 000000000..9a2a1d5b6 --- /dev/null +++ b/web/app/models/application-summary.js @@ -0,0 +1,31 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + username: DS.attr("string"), + starttime: DS.attr("date"), + finishtime: DS.attr("date"), + runtime: DS.attr("string"), + waittime: DS.attr("string"), + resourceused: DS.attr("string"), + resourcewasted: DS.attr("string"), + severity: DS.attr("string"), + heuristicsummary: DS.attr(), + jobname: DS.attr("string") +}); + diff --git a/web/app/models/application.js b/web/app/models/application.js new file mode 100644 index 000000000..e1e731066 --- /dev/null +++ b/web/app/models/application.js @@ -0,0 +1,37 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + username: DS.attr('string'), + jobtype: DS.attr('string'), + mapreducejobname: DS.attr('string'), + starttime: DS.attr('date'), + finishtime: DS.attr('date'), + runtime: DS.attr('string'), + waittime: DS.attr('string'), + resourceused: DS.attr('string'), + resourcewasted: DS.attr('string'), + severity: DS.attr('string'), + trackingurl: DS.attr('string'), + jobexecid: DS.attr('string'), + jobdefid: DS.attr('string'), + flowexecid: DS.attr('string'), + flowdefid: DS.attr('string'), + yarnappheuristicresults: DS.attr(), + queue: DS.attr('string') +}); diff --git a/web/app/models/dashboard-summary.js b/web/app/models/dashboard-summary.js new file mode 100644 index 000000000..80d5b5930 --- /dev/null +++ b/web/app/models/dashboard-summary.js @@ -0,0 +1,26 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + total: DS.attr("string"), + critical: DS.attr("string"), + severe: DS.attr("string"), + moderate: DS.attr("string"), + low: DS.attr("string"), + none: DS.attr("string") +}); diff --git a/web/app/models/exception-status.js b/web/app/models/exception-status.js new file mode 100644 index 000000000..0d7bb742a --- /dev/null +++ b/web/app/models/exception-status.js @@ -0,0 +1,22 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + exceptionenabled: DS.attr("string"), + schedulers: DS.attr() +});
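+ +/* + * The models above are read-only projections of the REST API. With + * DS.RESTSerializer the payload is expected to be keyed by the model name, + * e.g. (assumed shape) { "dashboard-summary": { "total": "10", "critical": "2", ... } }. + */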
diff --git a/web/app/models/job-summary.js b/web/app/models/job-summary.js new file mode 100644 index 000000000..4da13a354 --- /dev/null +++ b/web/app/models/job-summary.js @@ -0,0 +1,43 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import DS from 'ember-data'; +import Scheduler from 'dr-elephant/utils/scheduler'; + +export default DS.Model.extend({ + jobname: DS.attr("string"), + jobtype: DS.attr("string"), + username: DS.attr("string"), + starttime: DS.attr("date"), + finishtime: DS.attr("date"), + runtime: DS.attr("string"), + waittime: DS.attr("string"), + resourceused: DS.attr("string"), + resourcewasted: DS.attr("string"), + severity: DS.attr("string"), + jobdefid: DS.attr("string"), + jobexecid: DS.attr("string"), + tasksseverity: DS.attr(), + scheduler: DS.attr("string"), + jobdisplayname: Ember.computed('jobname', 'jobdefid', 'jobexecid', 'scheduler', function () { + var jobname = this.get('jobname'); + if (jobname != null && jobname != "") { + return jobname; + } + var scheduler = new Scheduler(); + return scheduler.getJobDisplayName(this.get('jobexecid'), this.get('jobdefid'), this.get('scheduler')); + }) +}); diff --git a/web/app/models/job.js b/web/app/models/job.js new file mode 100644 index 000000000..2f47b091f --- /dev/null +++ b/web/app/models/job.js @@ -0,0 +1,47 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import DS from 'ember-data'; +import Scheduler from 'dr-elephant/utils/scheduler'; + +export default DS.Model.extend({ + username: DS.attr("string"), + jobname: DS.attr("string"), + jobtype: DS.attr("string"), + starttime: DS.attr("date"), + finishtime: DS.attr("date"), + runtime: DS.attr("string"), + waittime: DS.attr("string"), + resourceused: DS.attr("string"), + resourcewasted: DS.attr("string"), + severity: DS.attr("string"), + jobexecid: DS.attr("string"), + jobdefid: DS.attr("string"), + flowexecid: DS.attr("string"), + flowdefid: DS.attr("string"), + taskssummaries: DS.attr(), + tasksseverity: DS.attr(), + queue: DS.attr("string"), + scheduler: DS.attr("string"), + jobdisplayname: Ember.computed('jobname', 'jobdefid', 'jobexecid', 'scheduler', function () { + var jobname = this.get('jobname'); + if (jobname != null && jobname != "") { + return jobname; + } + var scheduler = new Scheduler(); + return scheduler.getJobDisplayName(this.get('jobexecid'), this.get('jobdefid'), this.get('scheduler')); + }) +});
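+ +/* + * Note: when the backend sends an empty jobname, jobdisplayname falls back to + * Scheduler#getJobDisplayName(jobexecid, jobdefid, scheduler). + */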
diff --git a/web/app/models/search-option.js b/web/app/models/search-option.js new file mode 100644 index 000000000..a4f978926 --- /dev/null +++ b/web/app/models/search-option.js @@ -0,0 +1,22 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + jobcategory: DS.attr(), + severities: DS.attr() +}); diff --git a/web/app/models/search-result.js b/web/app/models/search-result.js new file mode 100644 index 000000000..5e55bcfce --- /dev/null +++ b/web/app/models/search-result.js @@ -0,0 +1,24 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + start: DS.attr(), + end: DS.attr(), + total: DS.attr(), + summaries: DS.attr() +}); diff --git a/web/app/models/user-detail.js b/web/app/models/user-detail.js new file mode 100644 index 000000000..51dbd1731 --- /dev/null +++ b/web/app/models/user-detail.js @@ -0,0 +1,31 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + totalapplications: DS.attr("string"), + totalworkflows: DS.attr("string"), + totaljobs: DS.attr("string"), + resourceused: DS.attr("string"), + resourcewasted: DS.attr("string"), + runtime: DS.attr("string"), + waittime: DS.attr("string"), + summaries: DS.attr(), + total: DS.attr(), + start: DS.attr(), + end: DS.attr() +}); diff --git a/web/app/models/users.js b/web/app/models/users.js new file mode 100644 index 000000000..a63cb1e12 --- /dev/null +++ b/web/app/models/users.js @@ -0,0 +1,96 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +/** + * Custom model that stores usernames in the browser's localStorage.
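+ * + * Storage layout (the keys written below): + * 'dr-elephant-users' holds a comma-separated list of usernames, e.g. "user1,user2"; + * 'active-user' holds the currently selected username (the string "null" when unset).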
+ */ +export default Ember.Object.extend({ + + /** + * Sets the given user as the active user + */ + setActiveUser: function (user) { + localStorage.setItem('active-user', user); + }, + + /** + * Returns the current active user from the store + */ + getActiveUser: function () { + if (localStorage.getItem("active-user") == "null") { + return null; + } + return localStorage.getItem("active-user"); + }, + + /** + * Returns all the stored usernames + */ + getUsernames: function () { + var usernamesString = localStorage.getItem('dr-elephant-users'); + if (usernamesString == null || usernamesString === "") { + return Ember.A([]); + } + var usernamesArray = Ember.A([]); + usernamesArray.pushObjects(usernamesString.split(",")); + return usernamesArray; + }, + + /** + * Stores the usernames + */ + storeUsernames: function () { + var usernamesString = this.usernames.join(","); + localStorage.setItem('dr-elephant-users', usernamesString); + }, + + /** + * Adds a new user to the local store + */ + addToUsername: function (user) { + var userNames = this.getUsernames(); + if (!userNames.contains(user)) { + userNames.pushObject(user); + } + var usernamesString = userNames.join(","); + localStorage.setItem('dr-elephant-users', usernamesString); + }, + + /** + * Deletes a username from the store + */ + deleteUsername: function (user) { + var userNames = this.getUsernames(); + if (userNames.contains(user)) { + userNames.removeObject(user); + } + var usernamesString = ""; + if (userNames.length !== 0) { + usernamesString = userNames.join(","); + } + localStorage.setItem('dr-elephant-users', usernamesString); + }, + + /** + * Clears the local storage + */ + clearStorage: function () { + localStorage.clear(); + } +});
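+ +/* + * Typical usage (mirrors the dashboard routes below, username illustrative): + * var users = new Users(); users.addToUsername('user1'); users.setActiveUser('user1'); + */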
diff --git a/web/app/models/workflow-exception.js b/web/app/models/workflow-exception.js new file mode 100644 index 000000000..09a43e038 --- /dev/null +++ b/web/app/models/workflow-exception.js @@ -0,0 +1,25 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.Model.extend({ + name: DS.attr("string"), + type: DS.attr("string"), + applications: DS.attr(), + status: DS.attr("string"), + exceptionSummary: DS.attr("string") +}); diff --git a/web/app/models/workflow-summary.js b/web/app/models/workflow-summary.js new file mode 100644 index 000000000..1ce92d09e --- /dev/null +++ b/web/app/models/workflow-summary.js @@ -0,0 +1,38 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import DS from 'ember-data'; +import Scheduler from 'dr-elephant/utils/scheduler'; + +export default DS.Model.extend({ + username: DS.attr("string"), + starttime: DS.attr("date"), + finishtime: DS.attr("date"), + runtime: DS.attr("string"), + waittime: DS.attr("string"), + resourceused: DS.attr("string"), + resourcewasted: DS.attr("string"), + severity: DS.attr("string"), + jobsseverity: DS.attr(), + queue: DS.attr("string"), + scheduler: DS.attr("string"), + flowdefid: DS.attr("string"), + flowexecid: DS.attr("string"), + flowname: Ember.computed('flowdefid', 'flowexecid', 'scheduler', function () { + var scheduler = new Scheduler(); + return scheduler.getFlowName(this.get('flowexecid'), this.get('flowdefid'), this.get('scheduler')); + }) +}); diff --git a/web/app/models/workflow.js b/web/app/models/workflow.js new file mode 100644 index 000000000..9950de46a --- /dev/null +++ b/web/app/models/workflow.js @@ -0,0 +1,39 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import DS from 'ember-data'; +import Scheduler from 'dr-elephant/utils/scheduler'; + +export default DS.Model.extend({ + username: DS.attr("string"), + starttime: DS.attr("date"), + finishtime: DS.attr("date"), + runtime: DS.attr("string"), + waittime: DS.attr("string"), + resourceused: DS.attr("string"), + resourcewasted: DS.attr("string"), + severity: DS.attr("string"), + flowexecid: DS.attr("string"), + flowdefid: DS.attr("string"), + jobssummaries: DS.attr(), + jobsseverity: DS.attr(), + queue: DS.attr("string"), + scheduler: DS.attr("string"), + flowname: Ember.computed('flowdefid', 'flowexecid', 'scheduler', function () { + var scheduler = new Scheduler(); + return scheduler.getFlowName(this.get('flowexecid'), this.get('flowdefid'), this.get('scheduler')); + }) +}); diff --git a/web/app/resolver.js b/web/app/resolver.js new file mode 100644 index 000000000..f655af272 --- /dev/null +++ b/web/app/resolver.js @@ -0,0 +1,19 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Resolver from 'ember-resolver'; + +export default Resolver; diff --git a/web/app/router.js b/web/app/router.js new file mode 100644 index 000000000..20650ed17 --- /dev/null +++ b/web/app/router.js @@ -0,0 +1,59 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License.
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import config from './config/environment'; + +const Router = Ember.Router.extend({ + location: config.locationType, + + metrics: Ember.inject.service(), + + didTransition() { + this._super(...arguments); + if(config.APP.enableMetrics) { + this._trackPage(); + } + }, + + _trackPage() { + Ember.run.scheduleOnce('afterRender', this, () => { + const page = this.get('url'); + const title = this.getWithDefault('currentRouteName', 'unknown'); + + Ember.get(this, 'metrics').trackPage({ page, title }); + }); + } +}); + +Router.map(function () { + this.route('dashboard', function () { + this.route('workflow'); + this.route('job'); + this.route('app'); + }); + this.route('help'); + this.route('workflow'); + this.route('job'); + this.route('app'); + this.route('search'); + this.route('not-found'); + this.route('user-details'); + this.route('workflow-exceptions'); + this.route('work-in-progress'); +}); + +export default Router; diff --git a/web/app/routes/app.js b/web/app/routes/app.js new file mode 100644 index 000000000..5fd08cb04 --- /dev/null +++ b/web/app/routes/app.js @@ -0,0 +1,36 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + + beforeModel: function (transition) { + this.applicationid = transition.queryParams.applicationid; + }, + + model(){ + this.applications = this.store.queryRecord('application', {applicationid: this.get("applicationid")}); + return this.applications; + }, + actions: { + error(error, transition) { + if (error.errors[0].status == 404) { + return this.transitionTo('not-found', { queryParams: {'previous': window.location.href}}); + } + } + } +}); diff --git a/web/app/routes/dashboard.js b/web/app/routes/dashboard.js new file mode 100644 index 000000000..714e1c80f --- /dev/null +++ b/web/app/routes/dashboard.js @@ -0,0 +1,27 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + afterModel() { + this.transitionTo('dashboard.workflow'); + }, + model(){ + this.dashboardSummary = this.store.queryRecord('dashboard-summary',{}); + return this.dashboardSummary; + } +}); diff --git a/web/app/routes/dashboard/app.js b/web/app/routes/dashboard/app.js new file mode 100644 index 000000000..642fdd767 --- /dev/null +++ b/web/app/routes/dashboard/app.js @@ -0,0 +1,43 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import Users from 'dr-elephant/models/users'; + +export default Ember.Route.extend({ + users: new Users(), + beforeModel(){ + this.usernames = this.users.getUsernames(); + this.set('usernames',this.users.getUsernames()); + }, + model(){ + /** Do not load applications model here, they will be loaded in afterModel **/ + return Ember.RSVP.hash({ + usernames: this.users.getUsernames(), + applications: {} + }); + }, + afterModel() { + /** once the page is rendered, click on the current user tab **/ + Ember.run.scheduleOnce('afterRender', this, function() { + if(this.users.getActiveUser()==null) { + Ember.$("#all a").trigger("click"); + } else { + Ember.$("#" + this.users.getActiveUser()).trigger("click"); + } + }); + } +}); diff --git a/web/app/routes/dashboard/job.js b/web/app/routes/dashboard/job.js new file mode 100644 index 000000000..0a5903b35 --- /dev/null +++ b/web/app/routes/dashboard/job.js @@ -0,0 +1,43 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import Ember from 'ember'; +import Users from 'dr-elephant/models/users'; + +export default Ember.Route.extend({ + users: new Users(), + beforeModel(){ + this.usernames = this.users.getUsernames(); + this.set('usernames',this.users.getUsernames()); + }, + model(){ + /** Do not load jobs here, jobs will be loaded in afterModel **/ + return Ember.RSVP.hash({ + usernames: this.users.getUsernames(), + jobs: {} + }); + }, + afterModel() { + /** once the page is rendered, click on the current user tab **/ + Ember.run.scheduleOnce('afterRender', this, function() { + if(this.users.getActiveUser()==null) { + Ember.$("#all a").trigger("click"); + } else { + Ember.$("#" + this.users.getActiveUser()).trigger("click"); + } + }); + } +}); diff --git a/web/app/routes/dashboard/workflow.js b/web/app/routes/dashboard/workflow.js new file mode 100644 index 000000000..8c725b50c --- /dev/null +++ b/web/app/routes/dashboard/workflow.js @@ -0,0 +1,44 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import Users from 'dr-elephant/models/users'; + +export default Ember.Route.extend({ + + users: new Users(), + beforeModel(){ + this.usernames = this.users.getUsernames(); + this.set('usernames',this.users.getUsernames()); + }, + model(){ + /** do not load workflows here, workflows will be loaded in afterModel **/ + return Ember.RSVP.hash({ + usernames: new Users().getUsernames(), + workflows: {} + }); + }, + afterModel() { + /** once the page is rendered, click on the active user tab **/ + Ember.run.scheduleOnce('afterRender', this, function() { + if(this.users.getActiveUser()==null) { + Ember.$("#all a").trigger("click"); + } else { + Ember.$("#" + this.users.getActiveUser()).trigger("click"); + } + }); + } +}); diff --git a/web/app/routes/index.js b/web/app/routes/index.js new file mode 100644 index 000000000..1de64fce2 --- /dev/null +++ b/web/app/routes/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + beforeModel() { + this.transitionTo('dashboard.workflow'); + } +}); diff --git a/web/app/routes/job.js b/web/app/routes/job.js new file mode 100644 index 000000000..46362b9b4 --- /dev/null +++ b/web/app/routes/job.js @@ -0,0 +1,34 @@ +/* + * Copyright 2016 LinkedIn Corp. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + beforeModel: function (transition) { + this.jobid = transition.queryParams.jobid; + }, + model(){ + this.jobs = this.store.queryRecord('job', {jobid: this.get("jobid")}); + return this.jobs; + }, + actions: { + error(error, transition) { + if (error.errors[0].status == 404) { + return this.transitionTo('not-found', { queryParams: {'previous': window.location.href}}); + } + } + } +}); diff --git a/web/app/routes/not-found.js b/web/app/routes/not-found.js new file mode 100644 index 000000000..cdd575783 --- /dev/null +++ b/web/app/routes/not-found.js @@ -0,0 +1,5 @@ +import Ember from 'ember'; + +export default Ember.Route.extend({ + +}); diff --git a/web/app/routes/search.js b/web/app/routes/search.js new file mode 100644 index 000000000..f92c5fcbe --- /dev/null +++ b/web/app/routes/search.js @@ -0,0 +1,49 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + model(){ + return Ember.RSVP.hash({ + searchOptions: this.store.queryRecord('search-option', {}), + summaries: this.store.queryRecord('search-result', { + 'username': this.username, + 'queue-name': this.queueName, + 'job-type': this.jobType, + 'severity': this.severity, + 'analysis': this.analysis, + 'finish-time-begin': this.finishTimeBegin, + 'finish-time-end': this.finishTimeEnd, + 'type': this.type, + 'offset': this.offset, + 'limit': this.limit + }) + }); + }, + beforeModel(transition) { + this.username = transition.queryParams.username; + this.queueName = transition.queryParams.queueName; + this.jobType = transition.queryParams.jobType; + this.severity = transition.queryParams.severity; + this.analysis = transition.queryParams.analysis; + this.finishTimeBegin = transition.queryParams.finishTimeBegin; + this.finishTimeEnd = transition.queryParams.finishTimeEnd; + this.type = transition.queryParams.type; + this.offset = transition.queryParams.offset; + this.limit = transition.queryParams.limit; + } +}); diff --git a/web/app/routes/user-details.js b/web/app/routes/user-details.js new file mode 100644 index 000000000..e255f685a --- /dev/null +++ b/web/app/routes/user-details.js @@ -0,0 +1,77 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + + notifications: Ember.inject.service('notification-messages'), + + beforeModel(transition) { + this.finishTimeBegin = transition.queryParams.finishTimeBegin; + this.finishTimeEnd = transition.queryParams.finishTimeEnd; + this.sortKey = transition.queryParams.sortKey; + this.increasing = transition.queryParams.increasing; + this.users = transition.queryParams.usernames; + if (this.users != "" && this.users != null) { + this.set("usernames", transition.queryParams.usernames.split(",")); + } else { + this.set("usernames", {}); + } + }, + + model() { + if (this.users != null && this.users != "") { + let userdetails = this.store.queryRecord('user-detail', { + 'usernames': this.users, + 'finished-time-begin': this.finishTimeBegin, + 'finished-time-end': this.finishTimeEnd, + 'sortKey': this.sortKey, + 'increasing': this.increasing + }); + return userdetails; + } else { + return null; + } + }, + + actions: { + error(error, transition) { + if (error.errors[0].status == 404) { + this.get('notifications').error('No applications found for given query!', { + autoClear: true, + }); + this.set("showUserDetails", false); + } + } + }, + + setupController: function (controller, model) { + if (model == null) { + controller.set("showUserDetails", false); + controller.set("usernameSet", new Set()); + controller.set("usernamesArray", Ember.A([])); + return; + } + controller.set('model', model); + controller.set("showUserDetails", true); + + let usernameSet = new Set(); + for (var i = 0; i < this.get('usernames').length; i++) { + usernameSet.add(this.get('usernames')[i]); + } + controller.set("usernameSet", usernameSet); + controller.set("usernamesArray", Array.from(usernameSet)); + } +});
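+ +/* + * 'usernames' arrives as a single comma-separated query parameter; it is + * de-duplicated through a Set before being handed to the controller. + */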
diff --git a/web/app/routes/work-in-progress.js b/web/app/routes/work-in-progress.js new file mode 100644 index 000000000..8dad43593 --- /dev/null +++ b/web/app/routes/work-in-progress.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ +}); diff --git a/web/app/routes/workflow-exceptions.js b/web/app/routes/workflow-exceptions.js new file mode 100644 index 000000000..3fac39b5a --- /dev/null +++ b/web/app/routes/workflow-exceptions.js @@ -0,0 +1,81 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + notifications: Ember.inject.service('notification-messages'), + + model(){ + let exceptionStatuses = this.store.queryRecord('exception-status', {}); + let exceptionValues = null; + if (this.workflowId == null || this.workflowId == "") { + exceptionValues = null; + } else { + exceptionValues = this.store.query('workflow-exception', { + 'flow-exec-url': this.workflowId, 'scheduler': this.scheduler + }); + } + return Ember.RSVP.hash({ + exceptionStatus: exceptionStatuses, exceptions: exceptionValues + }); + }, + + beforeModel(transition) { + this.workflowId = transition.queryParams.workflowId; + }, + + actions: { + error(error, transition) { + console.log("error occurred"); + if (error.errors[0].status == 404) { + this.get('notifications').error('No applications found for given query!', { + autoClear: true + }); + this.set("showExceptions", false); + } else if (error.errors[0].status == 503) { + this.set("loading", false); + this.get('notifications').error("This feature is still in progress", { + autoClear: true + }); + this.transitionTo('work-in-progress'); + } else { + console.log("error occurred"); + this.get('notifications').error('Unexpected error occurred!', { + autoClear: true + }); + this.set("showExceptions", false); + } + } + }, + + setupController: function (controller, model) { + controller.set("model", model); + if (model.exceptions == null) { + controller.set("showExceptions", false); + return; + } else if (Ember.isEmpty(model.exceptions)) { + controller.set("showExceptions", false); + + this.get('notifications').info('All applications succeeded!', { + autoClear: true + }); + return; + } + controller.set("showExceptions", true); + } +});
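+ +/* + * model() resolves both pieces in one RSVP.hash: the scheduler-level + * exception-status and, only when a workflowId query param is present, the + * per-application workflow-exception records for that flow execution. + */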
diff --git a/web/app/routes/workflow.js b/web/app/routes/workflow.js new file mode 100644 index 000000000..9fe090faa --- /dev/null +++ b/web/app/routes/workflow.js @@ -0,0 +1,41 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default Ember.Route.extend({ + + notifications: Ember.inject.service('notification-messages'), + beforeModel: function (transition) { + this.workflowid = transition.queryParams.workflowid; + }, + model(){ + this.workflows = this.store.queryRecord('workflow', {workflowid: this.get("workflowid")}); + return this.workflows; + }, + actions: { + error(error, transition) { + if (error.errors[0].status == 404) { + return this.transitionTo('not-found', { queryParams: {'previous': window.location.href}}); + } else { + this.get('notifications').error('Uh-oh! Something went wrong.', { + autoClear: true + }); + return; + } + } + } +}); diff --git a/web/app/serializers/app.js b/web/app/serializers/app.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/app.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/serializers/application-summary.js b/web/app/serializers/application-summary.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/application-summary.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/serializers/application.js b/web/app/serializers/application.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/application.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License.
+ */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/serializers/job-summary.js b/web/app/serializers/job-summary.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/job-summary.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/serializers/job.js b/web/app/serializers/job.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/job.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/serializers/workflow-summary.js b/web/app/serializers/workflow-summary.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/workflow-summary.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/serializers/workflow.js b/web/app/serializers/workflow.js new file mode 100644 index 000000000..72aab82e7 --- /dev/null +++ b/web/app/serializers/workflow.js @@ -0,0 +1,20 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import DS from 'ember-data'; + +export default DS.RESTSerializer.extend({ +}); diff --git a/web/app/styles/app.css b/web/app/styles/app.css new file mode 100644 index 000000000..f87eca85e --- /dev/null +++ b/web/app/styles/app.css @@ -0,0 +1,202 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +@import 'partials/application.css'; +@import 'partials/navbar.css'; +@import 'partials/dashboard-header.css'; +@import 'partials/jobs-severity.css'; +@import 'partials/aggregated-metrics.css'; +@import 'partials/breadcrumbs.css'; +@import 'partials/user-tabs.css'; +@import 'partials/application-menu.css'; +@import 'partials/heuristics-summary.css'; +@import 'partials/search.css'; +@import 'partials/heuristic-detail.css'; +@import 'partials/loading-panel.css'; +@import 'partials/search-panel.css'; +@import 'partials/not-found.css'; +@import 'partials/user-detail.css'; +@import 'partials/user-tags.css'; + +.list-group-item { + border-radius: 0 !important; + padding: 10px; + border: 0; +} + +.shadow { + box-shadow: 0 1px 8px -2px rgb(82, 82, 82); +} + +.box { + position: relative; + border: none; +} + +.borderless td, .borderless th { + border: none; + border-top: 0; +} + +.severity-Severe { + color: #E4804E !important; +} + +.severity-Critical { + color: #D9534F !important; +} + +.severity-Moderate { + color: #F0AD4E !important; +} + +.severity-Low { + color: #5CB85C !important; +} + +.severity-None { + color: #5CB85C !important; +}
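+ +/* + * Note: these hex values match SEVERITY_TO_COLOR_CODE_MAP in + * web/app/helpers/get-color-for-severity.js. + */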
+.app-header { + color: #5e6365; + font-family: 'Shift', sans-serif; + margin-left: 10px; + width: 100%; + margin-bottom: 5px; +} + +.horizontal-line { + margin: 0px; + padding: 0px; +} + +.date-header { + color: #797e80; + font-family: 'Shift', sans-serif; + font-size: 12px; + padding-left: 10px; +} + +.app-summary-table { + width: 100%; +} + +.severity-side-blob-generic { + height: 100%; + width: 4px; +} + +.severity-side-blob-success { + background: #5CB85C; +} + +.severity-side-blob-warning { + background: #F0AD4E; +} + +.severity-side-blob-severe { + background: #E4804E; +} + +.severity-side-blob-danger { + background: #D9534F; +} + +.user { + padding-right: 15px; + text-align: right; + width: 250px; +} + +.heading { + font-size: 16px; +} + +.details-container { + color: #5e6365; + font-family: 'Shift', sans-serif; + background: white; + margin: 10px 0px 10px 0px; + padding: 10px 15px 10px 25px; +} + +.info { + margin-top: 10px; +} + +.info td, tr { + height: 20px; + margin: 7px; + padding: 7px; + max-width: 100%; +} + +.summary-list-container { + background: white; + margin-top: 30px; + padding-top: 3px; + border-radius: 5px; + margin-bottom: 20px; +} + +.summary-list-header { + margin: 10px 0px 10px 25px; + color: #5e6365; + font-size: 18px; +} + +.bold { + font-weight: bold; +} + +.search-box { + color: #5e6365; + font-family: 'Shift', sans-serif; + background: white; + margin: 30px 0px 10px 0px; + padding: 10px 15px 10px 25px; +} + +.cp-Panel { + /*border: 1px solid #e9e9e9;*/ + border: 1px solid rgba(70, 167, 211, 0.34); +} +.cp-Panel-toggle { + display: block; + padding: 1em; +} +.cp-Panel-toggle:link { + text-decoration: none; +} +.cp-Panel-toggle:hover { + background-color: #46A7D3; + color: white; + /*border: 1px solid #46A7D3;*/ +} +.cp-Panel-body-inner { + padding: 1em; +} + +.workflow-panel-cp { + margin: 10px 10px 10px 10px; +} diff --git a/web/app/styles/partials/aggregated-metrics.css b/web/app/styles/partials/aggregated-metrics.css new file mode 100644 index 000000000..79c228c34 --- /dev/null +++ b/web/app/styles/partials/aggregated-metrics.css @@ -0,0 +1,39 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +.aggregated-metrics-block { + font-size: 13px; + color: #5e6365; + font-family: 'Shift', sans-serif; + padding-left: 10px; +} + +.usedmemory-td { + width: 175px; +} + +.wastedmemory-td { + width: 100px; +} + +.runtime-td { + width: 120px; +} + +.waittime-td { + width: 100px; +} diff --git a/web/app/styles/partials/application-menu.css b/web/app/styles/partials/application-menu.css new file mode 100644 index 000000000..da5776dc4 --- /dev/null +++ b/web/app/styles/partials/application-menu.css @@ -0,0 +1,44 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** Dashboard menu **/ +.menu-workflow { + width: 100%; +} + +.wf-button { + background-color: white; + display: inline-block; + cursor: pointer; + border-radius: 3px; + font-size: 14px; + font-weight: 500; + border: #0084BF 1px solid; + color: #0084BF; +} + +.wf-button:hover { + background-color: #008cc9; + color: white; +} + +.menu-workflow > .active { + background-color: #008cc9; + color: white; + opacity: 0.7; + border: 1px #008cc9 solid; + box-shadow: none; +} diff --git a/web/app/styles/partials/application.css b/web/app/styles/partials/application.css new file mode 100644 index 000000000..3ffc8419e --- /dev/null +++ b/web/app/styles/partials/application.css @@ -0,0 +1,19 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License.
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +body { + background: #E7E9EC !important; /* Adding !important forces the browser to overwrite the default style applied by Bootstrap */ +} diff --git a/web/app/styles/partials/breadcrumbs.css b/web/app/styles/partials/breadcrumbs.css new file mode 100644 index 000000000..33d7b66b8 --- /dev/null +++ b/web/app/styles/partials/breadcrumbs.css @@ -0,0 +1,117 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** breadcrumb **/ +.bread-crumb { + display: inline-block; + overflow: hidden; + background: #0084BF; + margin-top: 20px; +} + +.bread-crumb a { + text-decoration: none; + outline: none; + display: block; + float: left; + font-size: 12px; + line-height: 36px; + color: white; + padding: 0 10px 0 30px; + background: #666; + background: linear-gradient(#666, #333); + position: relative; +} + +/*since the first link does not have a triangle before it we can reduce the left padding to make it look consistent with other links*/ +.bread-crumb a:first-child { + padding-left: 10px; + border-radius: 5px 0 0 5px; /*to match with the parent's radius*/ +} + +.bread-crumb a:last-child { + border-radius: 0 5px 5px 0; /*this was to prevent glitches on hover*/ + padding-right: 20px; +} + +/*hover/active styles*/ +.bread-crumb a.active, .bread-crumb a:hover { + background: #333; + background: linear-gradient(#333, #000); +} + +.bread-crumb a.active:after, .bread-crumb a:hover:after { + background: #333; + background: linear-gradient(135deg, #333, #000); +} + +/*adding the arrows for the breadcrumbs using rotated pseudo-elements*/ +.bread-crumb a:after { + content: ''; + position: absolute; + top: 0; + right: -18px; /*half of square's length*/ + width: 36px; + height: 36px; + transform: scale(0.707) rotate(45deg); + /*we need to prevent the arrows from getting buried under the next link*/ + z-index: 1; + /*background same as links but the gradient will be rotated to compensate with the transform applied*/ + background: #666; + background: linear-gradient(135deg, #666, #333); + /*stylish arrow design using box shadow*/ + box-shadow: 2px -2px 0 2px rgba(0, 132, 191, 1), + 3px -3px 0 2px rgba(255, 255, 255, 0.1); + /* + 5px - for rounded arrows and + 50px - to prevent hover glitches on the border created using shadows*/ + border-radius: 0 5px 0 50px; +} + +/*we don't need an arrow after the last link*/ +.bread-crumb a:last-child:after { + content: none; +} + +/*we will use the :before element to show numbers*/ +.bread-crumb a:before { + border-radius: 100%; + height: 20px; + line-height: 20px; + position: absolute; + top: 0;
background: #444; + background: linear-gradient(#444, #222); + font-weight: bold; +} + +.flat a, .flat a:after { + background: white; + color: #5e6365; + transition: all 0.5s; +} + +.flat a:before { + background: white; + box-shadow: 0 0 0 1px #ccc; +} + +.flat a:hover, .flat a.active, +.flat a:hover:after, .flat a.active:after { + background: #0084bf; + color: white; +} diff --git a/web/app/styles/partials/dashboard-header.css b/web/app/styles/partials/dashboard-header.css new file mode 100644 index 000000000..f399090ea --- /dev/null +++ b/web/app/styles/partials/dashboard-header.css @@ -0,0 +1,73 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** +Dashboard header +*/ +.dashboard-header { + color: #707577; + font-family: 'Shift', sans-serif; + width: 100%; + margin-bottom: 20px; +} + +/** +speech bubble +**/ +.bubble { + position: relative; + width: 100%; + height: 150px; + background: #FFFFFF; + -webkit-border-radius: 10px; + -moz-border-radius: 10px; + border-radius: 10px; + border: #0084BF solid 1px; + padding-left: 50px; + margin-top: 30px; + padding-top: 8px; +} + +.bubble:after { + content: ''; + position: absolute; + border-style: solid; + border-width: 14px 25px 14px 0; + border-color: transparent #FFFFFF; + display: block; + width: 0; + z-index: 1; + left: -25px; + top: 55px; +} + +.bubble:before { + content: ''; + position: absolute; + border-style: solid; + border-width: 14px 25px 14px 0; + border-color: transparent #0084BF; + display: block; + width: 0; + z-index: 0; + left: -26px; + top: 55px; +} + +.logo { + width: 50px; + padding-left: 30px; +} diff --git a/web/app/styles/partials/heuristic-detail.css b/web/app/styles/partials/heuristic-detail.css new file mode 100644 index 000000000..effc99e66 --- /dev/null +++ b/web/app/styles/partials/heuristic-detail.css @@ -0,0 +1,28 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +.heuristic-item:hover { + background: grey; +} + +.heuristic-detail-table { + width: 100%; + margin: 10px 20px 10px 20px; +} + +.heuristic-detail-table td { + width: 50%; +} diff --git a/web/app/styles/partials/heuristics-summary.css b/web/app/styles/partials/heuristics-summary.css new file mode 100644 index 000000000..a6b6d537b --- /dev/null +++ b/web/app/styles/partials/heuristics-summary.css @@ -0,0 +1,57 @@ +/* + * Copyright 2016 LinkedIn Corp. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+.heuristics-block {
+ padding-top: 10px;
+ padding-bottom: 15px;
+ padding-left: 10px;
+}
+
+.heuristics-label-generic {
+ background-color: white;
+ padding-top: 3px;
+ padding-right: 5px;
+ padding-left: 5px;
+ margin-right: 7px;
+ margin-bottom: 2px;
+ padding-bottom: 3px;
+ text-align: center;
+ display: inline-block;
+ font-size: 11px;
+ float: left;
+ border-radius: 3px;
+ font-family: sans-serif;
+}
+
+.heuristics-label-severe {
+ border: #E4804E 1px solid;
+ color: #E4804E;
+}
+
+.heuristics-label-success {
+ border: #5CB85C 1px solid;
+ color: #5CB85C;
+}
+
+.heuristics-label-warning {
+ border: #F0AD4E 1px solid;
+ color: #F0AD4E;
+}
+
+.heuristics-label-danger {
+ border: #D9534F 1px solid;
+ color: #D9534F;
+}
diff --git a/web/app/styles/partials/jobs-severity.css b/web/app/styles/partials/jobs-severity.css
new file mode 100644
index 000000000..45adc41d4
--- /dev/null
+++ b/web/app/styles/partials/jobs-severity.css
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+jobs severity
+**/
+
+.horizontal-list {
+ margin: 0;
+ padding: 0;
+ list-style-type: none;
+}
+
+.horizontal-list li {
+ float: left;
+}
+
+.scnd-font-color {
+ color: #9099b7;
+}
+
+/** severity text **/
+
+.severity-horizontal-list li {
+ width: 75px;
+ border: none;
+ text-align: center;
+}
+
+.severity-text {
+ margin: 0;
+ padding: 0 0 5px;
+ font-size: 12px;
+}
+
+.severity-count {
+ margin: 0;
+ padding: 0;
+ font-size: 20px;
+}
+
+.job-severity-block {
+ padding-bottom: 5px;
+}
diff --git a/web/app/styles/partials/loading-panel.css b/web/app/styles/partials/loading-panel.css
new file mode 100644
index 000000000..d48fb3a80
--- /dev/null
+++ b/web/app/styles/partials/loading-panel.css
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+.loading-panel-div {
+ background: white;
+ width: 100%;
+ height: 800px;
+ opacity: 0.5;
+}
+
+.loading-image {
+ margin: auto;
+ display: block;
+ padding-top: 200px;
+}
diff --git a/web/app/styles/partials/navbar.css b/web/app/styles/partials/navbar.css
new file mode 100644
index 000000000..0392c6c72
--- /dev/null
+++ b/web/app/styles/partials/navbar.css
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/** navigation bar **/
+
+.navbar-default {
+ background-color: #ffffff;
+ border-color: #e8e8e8;
+}
+
+.navbar-default .navbar-brand {
+ color: #6a7478;
+}
+
+.navbar-default .navbar-brand:hover,
+.navbar-default .navbar-brand:focus {
+ color: #0084bf;
+}
+
+.navbar-default .navbar-text {
+ color: #6a7478;
+}
+
+.navbar-default .navbar-nav > li > a {
+ color: #6a7478;
+}
+
+.navbar-default .navbar-nav > li > a:hover,
+.navbar-default .navbar-nav > li > a:focus {
+ color: #0084bf;
+}
+
+.navbar-default .navbar-nav > .active > a,
+.navbar-default .navbar-nav > .active > a:hover,
+.navbar-default .navbar-nav > .active > a:focus {
+ color: #0084bf;
+ background-color: #e8e8e8;
+}
+
+.navbar-default .navbar-nav > .open > a,
+.navbar-default .navbar-nav > .open > a:hover,
+.navbar-default .navbar-nav > .open > a:focus {
+ color: #0084bf;
+ background-color: #e8e8e8;
+}
+
+.navbar-default .navbar-toggle {
+ border-color: #e8e8e8;
+}
+
+.navbar-default .navbar-toggle:hover,
+.navbar-default .navbar-toggle:focus {
+ background-color: #e8e8e8;
+}
+
+/** Navigation extension **/
+
+.nav-extension {
+ background-color: #0084BF;
+ height: 120px;
+ border: none;
+ margin-top: -20px;
+ padding: 0px;
+ z-index: -1;
+ position: absolute;
+ width: 100%;
+}
diff --git a/web/app/styles/partials/not-found.css b/web/app/styles/partials/not-found.css
new file mode 100644
index 000000000..677506d8b
--- /dev/null
+++ b/web/app/styles/partials/not-found.css
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+.not-found-container {
+ margin-left: 100px;
+ margin-top: 100px;
+ margin-bottom: 100px;
+}
\ No newline at end of file
diff --git a/web/app/styles/partials/search-panel.css b/web/app/styles/partials/search-panel.css
new file mode 100644
index 000000000..a003ca61b
--- /dev/null
+++ b/web/app/styles/partials/search-panel.css
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +.search-component { + height: 35px; + margin: 0px; +} diff --git a/web/app/styles/partials/search.css b/web/app/styles/partials/search.css new file mode 100644 index 000000000..18be1b52c --- /dev/null +++ b/web/app/styles/partials/search.css @@ -0,0 +1,63 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +.dropdown.dropdown-lg .dropdown-menu { + margin-top: -1px; + padding: 6px 20px; +} + +.input-group-btn .btn-group { + display: flex !important; +} + +.form-horizontal .form-group { + margin-left: 0; + margin-right: 0; +} + +.form-group .form-control:last-child { + border-top-left-radius: 4px; + border-bottom-left-radius: 4px; +} + +@media screen and (min-width: 300px) { + #adv-search { + width: 300px; + margin: 0 auto; + } + + .dropdown.dropdown-lg { + position: static !important; + } + + .dropdown.dropdown-lg .dropdown-menu { + min-width: 300px; + } +} + +.search-row { + height: 100%; +} + +.search-table { + width: 100%; +} + +.search-table td { + padding-right: 20px; + padding-left: 20px; + max-height: 100px; +} diff --git a/web/app/styles/partials/user-detail.css b/web/app/styles/partials/user-detail.css new file mode 100644 index 000000000..a5bced0d5 --- /dev/null +++ b/web/app/styles/partials/user-detail.css @@ -0,0 +1,104 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+.user-detail-general {
+ color: #5e6365;
+ font-family: 'Shift', sans-serif;
+ background: white;
+ padding: 10px 15px 10px 25px;
+ width: 100%;
+ margin: 15px 0px 10px 0px;
+ min-height: 200px;
+}
+
+.user-top {
+ min-height: 200px;
+ margin-top: 30px;
+}
+
+.application-count-block {
+ min-width: 250px;
+ height: 100px;
+ margin: 20px 10px 10px 10px;
+ padding: 0px;
+ float: left;
+ box-shadow: 0 2px 8px -2px rgb(82, 82, 82);
+}
+
+.application-count {
+ min-height: 70px;
+ margin: 0;
+ padding: 5px 10px 5px 10px;
+ /*background: #3998c3;*/
+ border: 1px solid #3998c3;
+ color: #3998c3;
+ /*color: white;*/
+ font-size: 30px;
+ text-align: center;
+ vertical-align: middle;
+ background: white;
+}
+
+.application-type {
+ min-height: 30px;
+ padding: 5px 5px 5px 5px;
+ text-align: center;
+ font-size: 14px;
+ color: grey;
+ background: white;
+}
+
+.application-count-workflow {
+ border: 1px solid #5cb85c;
+ color: #5cb85c;
+}
+
+.application-count-job {
+ border: 1px solid #5cb85c;
+ color: #5cb85c;
+}
+
+.application-count-application {
+ border: 1px solid #5cb85c;
+ color: #5cb85c;
+}
+
+.date-filter {
+ margin: 10px 10px 10px 10px;
+}
+.date-block {
+ width: 200px;
+ float: left;
+ margin: 10px 10px 10px 10px;
+ min-height: 80px;
+}
+
+.application-critical {
+ color: #D9534F;
+ border: 1px solid #D9534F;
+}
+
+.application-normal {
+ border: 1px solid #3998c3;
+ color: #3998c3;
+}
+
+.wrapper-for-details {
+ min-height: 250px;
+ overflow: auto;
+}
\ No newline at end of file
diff --git a/web/app/styles/partials/user-tabs.css b/web/app/styles/partials/user-tabs.css
new file mode 100644
index 000000000..e0dc15184
--- /dev/null
+++ b/web/app/styles/partials/user-tabs.css
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/** input user **/
+
+.input-user-li {
+ height: 100%;
+ border-radius: 3px;
+ margin-left: 5px;
+ border: 1px solid #dfdfdf;
+ background: white;
+ border-top-left-radius: 5px;
+ border-top-right-radius: 5px;
+}
+
+.input-user {
+ height: 40px;
+ border-top-left-radius: 7px;
+ border-top-right-radius: 7px;
+ border: none;
+ padding-left: 20px;
+ width: 100px;
+}
+
+.input-user-button {
+ height: 38px;
+ border: none;
+ color: #0084BF;
+ width: 30px;
+ margin-right: 5px;
+ margin-left: 5px;
+ border-top-left-radius: 3px;
+ border-top-right-radius: 3px;
+ background: white;
+ margin-bottom: 2px;
+}
+
+/**
+ Style for the user tabs
+ **/
+.nav-tab {
+ padding: 16px 16px 8px;
+ margin: 0 16px;
+ display: -webkit-box;
+ display: -webkit-flex;
+ display: -ms-flexbox;
+ display: flex;
+ -webkit-box-pack: center;
+ -ms-flex-pack: center;
+ -webkit-justify-content: center;
+ -moz-justify-content: center;
+ justify-content: center;
+ position: relative;
+}
+
+.single-tab {
+ height: 40px;
+}
diff --git a/web/app/styles/partials/user-tags.css b/web/app/styles/partials/user-tags.css
new file mode 100644
index 000000000..07175a2b5
--- /dev/null
+++ b/web/app/styles/partials/user-tags.css
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+.user-tags {
+ margin: 10px 10px 10px 10px;
+ padding: 5px 5px 5px 5px;
+}
+
+.tag-list {
+ margin-right: 10px;
+ padding: 10px;
+}
\ No newline at end of file
diff --git a/web/app/templates/app.hbs b/web/app/templates/app.hbs
new file mode 100644
index 000000000..f1c58a38b
--- /dev/null
+++ b/web/app/templates/app.hbs
@@ -0,0 +1,113 @@
+{{!--
+
+ Copyright 2016 LinkedIn Corp.
+
+ Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ use this file except in compliance with the License. You may obtain a copy of
+ the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ License for the specific language governing permissions and limitations under
+ the License.
+
+--}}
+
+
+
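+{{!-- Application detail page: renders the application's id, its metadata table
+  (username, queue, job type, and the MapReduce job name when present), the
+  scheduler links when a flow execution id exists, the heuristics summary and
+  aggregated metrics, and finally the per-heuristic details list. --}}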
+ + + + +
+

{{model.id}}

+
+ + + + + + + + + + + + + + + + {{#if (notEmpty model.mapreducejobname)}} + + + + + {{/if}} + + + + + +
Username{{model.username}}
Queue{{model.queue}}
Job type{{model.jobtype}}
Job name{{model.mapreducejobname}}
Job tracker{{model.id}}
+ + {{#if (notEmpty model.flowexecid)}} + + + + + + + + + + + +
Flow History Flow
+ definition Flow
+ execution
Job History Job
+ definition Job
+ execution
+ {{/if}} + + + + + + + + + + +
+ {{heuristics-summary heuristics=model.yarnappheuristicresults}} +
+ {{aggregated-metrics application=model}} +
+
+ +
+
Heuristic Details
+ {{heuristic-details-list heuristic-details = model.yarnappheuristicresults}} +
+
+ + + + diff --git a/web/app/templates/application.hbs b/web/app/templates/application.hbs new file mode 100644 index 000000000..986c45082 --- /dev/null +++ b/web/app/templates/application.hbs @@ -0,0 +1,57 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} +{{notification-container}} + + + +{{outlet}} diff --git a/web/app/templates/components/aggregated-metrics.hbs b/web/app/templates/components/aggregated-metrics.hbs new file mode 100644 index 000000000..47ee44488 --- /dev/null +++ b/web/app/templates/components/aggregated-metrics.hbs @@ -0,0 +1,46 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +
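+{{!-- Aggregated metrics strip shown on list rows and detail pages. Helper
+  semantics below are assumed from usage: getResourceInGBHours formats
+  resourceused as GB-hours, getPercentage renders its first argument as a
+  percentage of the second (wasted/used, waittime/runtime), and
+  getDurationBreakdown formats the runtime as HH:MM:SS. --}}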
+

+ + + + + + + +
used resources {{getResourceInGBHours application.resourceused}} + {{#tooltip-on-element}} + Resources used (GB Hours) + {{/tooltip-on-element}} + wasted resources {{getPercentage application.resourcewasted application.resourceused}} + {{#tooltip-on-element}} + Resources wasted (%) + {{/tooltip-on-element}} + total runtime {{getDurationBreakdown application.runtime}} + {{#tooltip-on-element}} + Duration (HH:MM:SS) + {{/tooltip-on-element}} + total wait time {{getPercentage application.waittime application.runtime}} + {{#tooltip-on-element}} + Delay (%) + {{/tooltip-on-element}} +
+

+
diff --git a/web/app/templates/components/application-list.hbs b/web/app/templates/components/application-list.hbs new file mode 100644 index 000000000..18910f2f6 --- /dev/null +++ b/web/app/templates/components/application-list.hbs @@ -0,0 +1,21 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{#each applications as |application|}} + {{single-application application=application}} +{{/each}} \ No newline at end of file diff --git a/web/app/templates/components/flow-list.hbs b/web/app/templates/components/flow-list.hbs new file mode 100644 index 000000000..a8c213c93 --- /dev/null +++ b/web/app/templates/components/flow-list.hbs @@ -0,0 +1,21 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{#each workflows as |flow|}} + {{single-flow flow=flow}} +{{/each}} \ No newline at end of file diff --git a/web/app/templates/components/heuristic-details-list.hbs b/web/app/templates/components/heuristic-details-list.hbs new file mode 100644 index 000000000..e3a56cd5b --- /dev/null +++ b/web/app/templates/components/heuristic-details-list.hbs @@ -0,0 +1,21 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{#each heuristic-details as |yarnappheuristicresult|}} + {{single-heuristic-detail yarnappheuristicresult=yarnappheuristicresult}} +{{/each}} \ No newline at end of file diff --git a/web/app/templates/components/heuristics-summary.hbs b/web/app/templates/components/heuristics-summary.hbs new file mode 100644 index 000000000..d0e55b8b3 --- /dev/null +++ b/web/app/templates/components/heuristics-summary.hbs @@ -0,0 +1,21 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. 
You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + {{#each heuristics as |heuristic|}} +
{{heuristic.name}}
+ {{/each}} diff --git a/web/app/templates/components/job-list.hbs b/web/app/templates/components/job-list.hbs new file mode 100644 index 000000000..9e3b5d8f8 --- /dev/null +++ b/web/app/templates/components/job-list.hbs @@ -0,0 +1,21 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{#each jobs as |job|}} + {{single-job job=job}} +{{/each}} \ No newline at end of file diff --git a/web/app/templates/components/jobs-severity.hbs b/web/app/templates/components/jobs-severity.hbs new file mode 100644 index 000000000..4b663f74d --- /dev/null +++ b/web/app/templates/components/jobs-severity.hbs @@ -0,0 +1,26 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +
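+{{!-- Severity legend for a flow: one count/label pair is rendered for each
+  entry in jobsseverity. --}}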
    + {{#each jobsseverity as |severity|}} +
  • +

    {{severity.count}}

    +

    {{severity.severity}}

    +
  • + {{/each}} +
\ No newline at end of file diff --git a/web/app/templates/components/loading-panel.hbs b/web/app/templates/components/loading-panel.hbs new file mode 100644 index 000000000..53d0e3e0c --- /dev/null +++ b/web/app/templates/components/loading-panel.hbs @@ -0,0 +1,22 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +
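+{{!-- Translucent full-width overlay with the loading image and the supplied
+  message, shown while a route's data is being fetched. --}}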
+ +

{{message}}

+
\ No newline at end of file diff --git a/web/app/templates/components/map-reduce-exception.hbs b/web/app/templates/components/map-reduce-exception.hbs new file mode 100644 index 000000000..90d540327 --- /dev/null +++ b/web/app/templates/components/map-reduce-exception.hbs @@ -0,0 +1,59 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +
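+{{!-- Nested accordion for an MR job's exceptions (cp-panel semantics assumed
+  from ember-collapsible-panel): the outer panel is the job itself, each inner
+  panel is one of its applications showing its exceptionSummary when non-empty,
+  and each application nests one panel per task with that task's summary. --}}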
+ {{#cp-panel as |p|}} + {{#p.toggle}} +

{{job.name}}

+ {{/p.toggle}} + {{#p.body}} + + {{#cp-panels class='u-margin-bottom' accordion=true as |panels|}} + {{#each job.applications as |application|}} + {{#panels.panel as |p|}} + {{#p.toggle}} +

{{application.name}}

+ {{/p.toggle}} + + {{#p.body}} + {{#unless (eq application.exceptionSummary "")}} +
{{application.exceptionSummary}}
+ {{/unless}} + {{#each application.tasks as |task|}} + {{#cp-panels class='u-margin-bottom' accordion=true as |panels|}} + {{#panels.panel as |taskpanel|}} + {{#taskpanel.toggle}} +

{{task.name}}

+ {{/taskpanel.toggle}} + + {{#taskpanel.body}} +
{{task.exceptionSummary}}
+ {{/taskpanel.body}} + {{/panels.panel}} + {{/cp-panels}} + {{/each}} + {{/p.body}} + {{/panels.panel}} + + {{/each}} + {{/cp-panels}} + {{/p.body}} + {{/cp-panel}} +
+ + diff --git a/web/app/templates/components/paging-panel.hbs b/web/app/templates/components/paging-panel.hbs new file mode 100644 index 000000000..f15163d05 --- /dev/null +++ b/web/app/templates/components/paging-panel.hbs @@ -0,0 +1,35 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + \ No newline at end of file diff --git a/web/app/templates/components/search-panel.hbs b/web/app/templates/components/search-panel.hbs new file mode 100644 index 000000000..bb60d7e38 --- /dev/null +++ b/web/app/templates/components/search-panel.hbs @@ -0,0 +1,44 @@ + +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + +
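+{{!-- Search bar: the primary-search input takes its placeholder from the
+  selected search type and fires the 'search' action on enter with the bound
+  searchQuery value. --}}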
+
+ + +
+ +
+ {{input size="40" id = "primary-search" type="text" class="form-control" placeholder=selectedTypePlaceHolder enter='search' value=searchQuery}} +
+ + + +
\ No newline at end of file diff --git a/web/app/templates/components/single-application.hbs b/web/app/templates/components/single-application.hbs new file mode 100644 index 000000000..082e48be0 --- /dev/null +++ b/web/app/templates/components/single-application.hbs @@ -0,0 +1,63 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + + + + +
+ + {{#link-to "app" (query-params applicationid=application.id) class="list-group-item list-group-item-control"}} + + + + + + +
+ {{#if (not-empty application.jobname)}} + {{application.jobname}} {{application.id}} + {{else}} + {{application.id}} + {{/if}} + + {{application.username}}
+ + + + + + + + + + + + + + + +
+ {{getDate application.finishtime}} +
{{heuristics-summary heuristics=application.heuristicsummary}}
{{aggregated-metrics application=application}}
+ {{/link-to}} +
diff --git a/web/app/templates/components/single-flow.hbs b/web/app/templates/components/single-flow.hbs new file mode 100644 index 000000000..768188988 --- /dev/null +++ b/web/app/templates/components/single-flow.hbs @@ -0,0 +1,56 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + + + + +
+ + {{#link-to "workflow" (query-params workflowid=flow.id) class="list-group-item list-group-item-control"}} + + + + + +
+ {{flow.flowname}} + {{flow.username}}
+ + + + + + + + + + + + + +
+ {{getDate flow.finishtime}} +
{{jobs-severity jobsseverity=flow.jobsseverity}}
{{aggregated-metrics application=flow}}
+ {{/link-to}} +
+ + diff --git a/web/app/templates/components/single-heuristic-detail.hbs b/web/app/templates/components/single-heuristic-detail.hbs new file mode 100644 index 000000000..04ed5be47 --- /dev/null +++ b/web/app/templates/components/single-heuristic-detail.hbs @@ -0,0 +1,45 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + diff --git a/web/app/templates/components/single-job.hbs b/web/app/templates/components/single-job.hbs new file mode 100644 index 000000000..376d1f2a1 --- /dev/null +++ b/web/app/templates/components/single-job.hbs @@ -0,0 +1,60 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + + + + +
+ + {{#link-to "job" (query-params jobid=job.id) class="list-group-item"}} + + + + + + +
+ {{#if (not-empty job.jobname)}} + {{job.jobtype}}: {{job.jobname}} + {{else}} + {{job.jobtype}}: {{job.jobdisplayname}} + {{/if}} + {{job.username}}
+ + + + + + + + + + + + + + + +
+ {{getDate job.finishtime}} +
{{tasks-severity tasksseverity=job.tasksseverity}}
{{aggregated-metrics application=job}}
+ {{/link-to}} +
diff --git a/web/app/templates/components/single-tab.hbs b/web/app/templates/components/single-tab.hbs new file mode 100644 index 000000000..673a77de9 --- /dev/null +++ b/web/app/templates/components/single-tab.hbs @@ -0,0 +1,19 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + \ No newline at end of file diff --git a/web/app/templates/components/tasks-severity.hbs b/web/app/templates/components/tasks-severity.hbs new file mode 100644 index 000000000..b4b7a1ccf --- /dev/null +++ b/web/app/templates/components/tasks-severity.hbs @@ -0,0 +1,26 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +
    + {{#each tasksseverity as |severity|}} +
  • +

    {{severity.count}}

    +

    {{severity.severity}}

    +
  • + {{/each}} +
\ No newline at end of file diff --git a/web/app/templates/components/user-tabs.hbs b/web/app/templates/components/user-tabs.hbs new file mode 100644 index 000000000..5260f41e7 --- /dev/null +++ b/web/app/templates/components/user-tabs.hbs @@ -0,0 +1,41 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + \ No newline at end of file diff --git a/web/app/templates/components/user-tags.hbs b/web/app/templates/components/user-tags.hbs new file mode 100644 index 000000000..001be96a5 --- /dev/null +++ b/web/app/templates/components/user-tags.hbs @@ -0,0 +1,35 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +
+ +
\ No newline at end of file diff --git a/web/app/templates/dashboard.hbs b/web/app/templates/dashboard.hbs new file mode 100644 index 000000000..1678bd1dc --- /dev/null +++ b/web/app/templates/dashboard.hbs @@ -0,0 +1,55 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
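+{{!-- Dashboard landing page: the speech bubble summarises the jobs analysed
+  today (model.total) and how many are severe or critical, and the outlet
+  below renders the per-user workflow/job/application tabs. --}}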
+ + + + + + + +
+
+

Hello there, I've been busy!

+ +

I looked through {{model.total}} jobs today.
+ About {{model.severe}} of them could use some tuning.
+ About {{model.critical}} of them need some serious attention! +

+
+
+ + + +
+
+ +
+ + {{outlet defaultUser="defaultUser"}} +
diff --git a/web/app/templates/dashboard/app.hbs b/web/app/templates/dashboard/app.hbs new file mode 100644 index 000000000..08499ac8f --- /dev/null +++ b/web/app/templates/dashboard/app.hbs @@ -0,0 +1,25 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{!-- The user tabs. Tabs can be added and deleted --}} +{{user-tabs deleteTab=(action "deleteTab") setTab=(action "changeTab") addTab=(action "addTab") usernames=model.usernames}} +{{#if loading}} + {{loading-panel message="loading applications... "}} +{{else}} + {{application-list applications=model.applications}} +{{/if}} diff --git a/web/app/templates/dashboard/job.hbs b/web/app/templates/dashboard/job.hbs new file mode 100644 index 000000000..053d2212a --- /dev/null +++ b/web/app/templates/dashboard/job.hbs @@ -0,0 +1,25 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{!-- The user tabs. Tabs can be added and deleted --}} +{{user-tabs deleteTab=(action "deleteTab") setTab=(action "changeTab") addTab=(action "addTab") usernames=model.usernames}} +{{#if loading}} + {{loading-panel message="loading jobs..."}} +{{else}} + {{job-list jobs=model.jobs}} +{{/if}} \ No newline at end of file diff --git a/web/app/templates/dashboard/workflow.hbs b/web/app/templates/dashboard/workflow.hbs new file mode 100644 index 000000000..5d396a283 --- /dev/null +++ b/web/app/templates/dashboard/workflow.hbs @@ -0,0 +1,27 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + +{{!-- The user tabs. 
Tabs can be added and deleted --}} +{{user-tabs deleteTab=(action "deleteTab") setTab=(action "changeTab") addTab=(action "addTab") usernames=model.usernames showInputBox=showInputBox}} + +{{!-- The list of flows of the user --}} +{{#if loading}} + {{loading-panel message="loading flows..."}} +{{else}} + {{flow-list workflows=model.workflows}} +{{/if}} diff --git a/web/app/templates/job.hbs b/web/app/templates/job.hbs new file mode 100644 index 000000000..8ec5e5327 --- /dev/null +++ b/web/app/templates/job.hbs @@ -0,0 +1,105 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
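+{{!-- Job detail page: the job display name, its metadata table (user, queue,
+  job type, start and end times), scheduler history links, the tasks-severity
+  legend and aggregated metrics, and the list of task applications. --}}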
+ + + +
+

{{model.jobdisplayname}}

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
User{{model.username}}
Queue{{model.queue}}
Job type{{model.jobtype}}
Start time{{model.starttime}}
End time{{model.finishtime}}
+ + + + + + + + + + + +
Flow History Flow
+ definition Flow
+ execution
Job History Job
+ definition Job
+ execution
+ + + + + + + + + + +
+ {{tasks-severity tasksseverity=model.tasksseverity}} +
+ {{aggregated-metrics application=model}} +
+
+ +
+
Applications
+ + {{application-list applications=model.taskssummaries}} +
+ +
diff --git a/web/app/templates/loading.hbs b/web/app/templates/loading.hbs new file mode 100644 index 000000000..6b3e9874d --- /dev/null +++ b/web/app/templates/loading.hbs @@ -0,0 +1,29 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
+
+ +

loading...

+
+
+ + + diff --git a/web/app/templates/not-found.hbs b/web/app/templates/not-found.hbs new file mode 100644 index 000000000..b201cd138 --- /dev/null +++ b/web/app/templates/not-found.hbs @@ -0,0 +1,41 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + +
+ +
diff --git a/web/app/templates/search.hbs b/web/app/templates/search.hbs new file mode 100644 index 000000000..89b3e80d8 --- /dev/null +++ b/web/app/templates/search.hbs @@ -0,0 +1,171 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
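+{{!-- Search results page: lists the matching applications unless the result
+  total is zero, and appends the paging panel once results span pages. --}}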
+ + + + {{!-- show application list --}} + {{#unless (eq model.summaries.total 0)}} +
+
Applications
+ {{#if loading}} + {{loading-panel message="searching..."}} + {{else}} + {{application-list applications=model.summaries.summaries}} + {{/if}} +
+ {{/unless}} + +{{!-- show pagination --}} + {{#if shouldShowPaging}} + {{paging-panel paging=paging loadPage=(action 'loadPage')}} + {{/if}} +
+ diff --git a/web/app/templates/user-details.hbs b/web/app/templates/user-details.hbs new file mode 100644 index 000000000..6b7b53f82 --- /dev/null +++ b/web/app/templates/user-details.hbs @@ -0,0 +1,117 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
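+{{!-- Per-user report: user tags, a finish-time date range filter with a
+  sort-order checkbox, tiles for aggregate metrics (resources used and wasted,
+  runtime, delay, and workflow/job/application counts), then the user's
+  applications with paging. --}}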
+ +
+ + {{user-tags usernamesArray=usernamesArray deleteTab=(action "deleteTab") addTab=(action "addTab")}} + +
+
+
+ + {{bootstrap-datepicker value=finishTimeBeginValue utc=true class="form-control" placeholder="From: mm/dd/yyyy" autoclose=true}} +
+
+ + {{bootstrap-datepicker value=finishTimeEndValue utc=true class="form-control" placeholder="To: mm/dd/yyyy" autoclose=true}} +
+ +
+ + + {{input type="checkbox" name="isIncreasing" checked=increasing}} + + +
+
+
+ +
+
+
+ +
+ + {{#if loading}} + {{loading-panel message="searching..."}} + {{else}} + {{#if showUserDetails}} +
+
+
{{getResourceInGbHoursValue model.resourceused}}
+
Resources Used (GB Hours)
+
+ +
+
{{getResourceInGbHoursValue model.resourcewasted}}
+
Resources Wasted (GB Hours)
+
+ +
+
{{getDurationBreakdown model.runtime}}
+
Runtime (HH:MM:SS)
+
+ +
+
{{getDurationBreakdown model.waittime}}
+
Total delay (HH:MM:SS)
+
+ +
+
{{model.totalworkflows}}
+
Workflows
+
+ +
+
{{model.totaljobs}}
+
Jobs
+
+ +
+
{{model.totalapplications}}
+
Applications
+
+
+ +
+
Applications
+ {{application-list applications=model.summaries}} +
+ + {{/if}} + + {{#if shouldShowPaging}} + {{paging-panel paging=paging loadPage=(action 'loadPage')}} + {{/if}} + + {{/if}} +
+ diff --git a/web/app/templates/work-in-progress.hbs b/web/app/templates/work-in-progress.hbs new file mode 100644 index 000000000..203ea96eb --- /dev/null +++ b/web/app/templates/work-in-progress.hbs @@ -0,0 +1,42 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + +
+ +
\ No newline at end of file diff --git a/web/app/templates/workflow-exceptions.hbs b/web/app/templates/workflow-exceptions.hbs new file mode 100644 index 000000000..14eb1903c --- /dev/null +++ b/web/app/templates/workflow-exceptions.hbs @@ -0,0 +1,100 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
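+{{!-- Workflow exceptions page: each entry in model.exceptions is dispatched on
+  job.type; MR jobs use the nested map-reduce-exception accordion, while SCRIPT
+  and SCHEDULER jobs get a single panel with their exceptionSummary. --}}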
+ + + + + + + {{#if loading}} + {{loading-panel message="searching..."}} + {{else}} + {{#if showExceptions}} + {{#unless (eq model null)}} +
+ +
+ {{#each model.exceptions as |job|}} + + {{#cp-panels class='u-margin-bottom workflow-panel-cp' accordion=true as |panels|}} + {{#panels.panel as |p|}} + {{#if (eq job.type "MR")}} + {{map-reduce-exception job=job}} + {{else if (eq job.type "SCRIPT")}} + {{#p.toggle}} +

{{job.name}}

+ {{/p.toggle}} + + {{#p.body}} +
+
{{job.exceptionSummary}}
+
+ {{/p.body}} + + {{else if (eq job.type "SCHEDULER")}} + {{#p.toggle}} +

{{job.name}}

+ {{/p.toggle}} + + {{#p.body}} +
+
{{job.exceptionSummary}}
+
+ {{/p.body}} + {{/if}} + {{/panels.panel}} + {{/cp-panels}} + + {{/each}} +
+
+ {{/unless}} + {{/if}} + {{/if}} + +
+ + + + diff --git a/web/app/templates/workflow.hbs b/web/app/templates/workflow.hbs new file mode 100644 index 000000000..87b3d6a3d --- /dev/null +++ b/web/app/templates/workflow.hbs @@ -0,0 +1,86 @@ +{{!-- + + Copyright 2016 LinkedIn Corp. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +--}} + + + +
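+{{!-- Workflow detail page: the flow name, its metadata table (user, queue,
+  start and end times), the jobs-severity legend and aggregated metrics, and
+  the list of job summaries. --}}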
+ + + +
+

{{model.flowname}}

+
+ + + + + + + + + + + + + + + + + + + + + +
User{{model.username}}
Queue{{model.queue}}
Start time{{model.starttime}}
End time{{model.finishtime}}
+ + + + + +
+ + + + + + + + + + + +
{{jobs-severity jobsseverity=model.jobsseverity}}
{{aggregated-metrics application=model}}
+
+ +
+
Jobs
+ {{job-list jobs=model.jobssummaries}} +
+ +
\ No newline at end of file
diff --git a/web/app/utils/scheduler.js b/web/app/utils/scheduler.js
new file mode 100644
index 000000000..a15aff342
--- /dev/null
+++ b/web/app/utils/scheduler.js
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Ember from 'ember';
+
+// add other schedulers here
+const Schedulers = {'AZKABAN': 'azkaban'};
+
+export default Ember.Object.extend({
+
+  getFlowName(flowExecutionId, flowDefinitionId, schedulerName) {
+
+    var flowName;
+
+    try {
+
+      // additional schedulers can be handled by adding cases to this switch.
+      switch (schedulerName) {
+
+        case Schedulers.AZKABAN:
+          var parser = document.createElement('a');
+
+          // flowdefid is expected to be of the form
+          // http://localhost:8043/manager?project=projectname&flow=flowname;
+          // anything else throws here and falls back to the raw execution id.
+          parser.href = flowDefinitionId;
+
+          var queryString = (parser.search).substring(1);
+          var projectname = queryString.split("&")[0].split("=")[1];
+          var flowname = queryString.split("&")[1].split("=")[1];
+
+          // flowexecid is expected to be of the form
+          // http://localhost:8043/executor?execid=id.
+          parser.href = flowExecutionId;
+
+          var execution = (parser.search).substring(1).split("&")[0].split("=")[1];
+
+          // create the name of the flow as scheduler: project: flow: execution,
+          // e.g. "azkaban: myproject: myflow: 123" (illustrative values).
+          flowName = Schedulers.AZKABAN + ": " + projectname + ": " + flowname + ": " + execution;
+          break;
+
+        default:
+          flowName = flowExecutionId;
+
+      }
+    } catch (err) {
+      flowName = flowExecutionId;
+    }
+    return flowName;
+  },
+
+  getJobDisplayName(jobExecutionId, jobDefinitionId, schedulerName) {
+
+    var displayName;
+
+    try {
+
+      // additional schedulers can be handled by adding cases to this switch.
+      switch (schedulerName) {
+
+        case Schedulers.AZKABAN:
+          var parser = document.createElement('a');
+
+          // the job name is the third query parameter of jobdefid
+          // (...?project=projectname&flow=flowname&job=jobname).
+          parser.href = jobDefinitionId;
+
+          var queryString = (parser.search).substring(1);
+          var jobname = queryString.split("&")[2].split("=")[1];
+
+          // the execution id is the first query parameter of jobexecid.
+          parser.href = jobExecutionId;
+          queryString = (parser.search).substring(1);
+          var execution = queryString.split("&")[0].split("=")[1];
+
+          displayName = jobname + ": " + execution;
+          break;
+
+        default:
+          displayName = jobExecutionId;
+
+      }
+    } catch (err) {
+      displayName = jobExecutionId;
+    }
+    return displayName;
+  }
+});
diff --git a/web/bower.json b/web/bower.json
new file mode 100644
index 000000000..7423882af
--- /dev/null
+++ b/web/bower.json
@@ -0,0 +1,12 @@
+{
+  "name": "dr-elephant",
+  "dependencies": {
+    "ember": "2.6.2",
+    "ember-cli-shims": "0.1.1",
+    "ember-cli-test-loader": "0.2.2",
+    "ember-qunit-notifications": "0.1.0",
+    "bootstrap": "3.3.7",
+    "d3": "4.2.6",
+    "bootstrap-datepicker": "^1.6.4"
+  }
+}
diff --git a/web/config/environment.js b/web/config/environment.js
new file mode 100644
index 000000000..7c44f65f2
--- /dev/null
+++ b/web/config/environment.js
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/* jshint node: true */ + +module.exports = function(environment) { + var ENV = { + + /* you can enable 'ENV.APP.enableMetrics': true, and add your own metrics tracking code below. Example shown */ + /** + metricsAdapters: [ + { + name: 'Piwik', + environments: ['production','development'], + config: { + piwikUrl: 'https://localhost/piwik/', + siteId: 86 + } + } + ], + **/ + modulePrefix: 'dr-elephant', + environment: environment, + baseURL: '/', + locationType: 'auto', + EmberENV: { + FEATURES: {} + }, + + APP: { + 'enableMetrics': false + }, + 'ember-cli-notifications': { + icons: 'bootstrap' + } + }; + + if (environment === 'development') { + ENV.APP.LOG_RESOLVER = true; + ENV.APP.LOG_ACTIVE_GENERATION = true; + ENV.APP.LOG_TRANSITIONS = true; + ENV.APP.LOG_TRANSITIONS_INTERNAL = true; + ENV.APP.LOG_VIEW_LOOKUPS = true; + } + + if (environment === 'test') { + ENV.rootURL = '/new/'; + ENV.locationType = 'none'; + ENV.APP.LOG_ACTIVE_GENERATION = false; + ENV.APP.LOG_VIEW_LOOKUPS = false; + ENV.APP.rootElement = '#ember-testing'; + } + + if (environment === 'production') { + ENV.rootURL = "/new"; + ENV.locationType = 'hash'; + } + + return ENV; +}; diff --git a/web/ember-cli-build.js b/web/ember-cli-build.js new file mode 100644 index 000000000..db4c4ef9d --- /dev/null +++ b/web/ember-cli-build.js @@ -0,0 +1,50 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +var EmberApp = require('ember-cli/lib/broccoli/ember-app'); + +module.exports = function (defaults) { + var app = new EmberApp(defaults, { + storeConfigInMeta: false, + SRI: { + enabled: false + }, + fingerprint: { + enabled: false + }, + outputPaths: { + app: { + css: { + 'app': '/assets/dr-elephant.css' + }, + js: '/assets/dr-elephant.js' + + }, + vendor: { + css: '/assets/vendor.css', + js: '/assets/vendor.js' + } + } + // Add options here + }); + app.import(app.bowerDirectory + '/bootstrap/dist/css/bootstrap.css'); + app.import(app.bowerDirectory + '/bootstrap/dist/js/bootstrap.js'); + + app.import(app.bowerDirectory + '/bootstrap/dist/fonts/glyphicons-halflings-regular.woff', { + destDir: 'fonts' + }); + return app.toTree(); +}; diff --git a/web/package.json b/web/package.json new file mode 100644 index 000000000..1e2eeabf3 --- /dev/null +++ b/web/package.json @@ -0,0 +1,54 @@ +{ + "name": "dr-elephant", + "version": "0.0.0", + "description": "Dr. 
Elephant is a performance monitoring and tuning tool for Hadoop and Spark", + "private": true, + "directories": { + "doc": "doc", + "test": "tests" + }, + "scripts": { + "build": "ember build --prod", + "start": "ember server", + "test": "ember test" + }, + "repository": "", + "engines": { + "node": ">= 0.10.0" + }, + "author": "", + "license": "MIT", + "devDependencies": { + "bower": "1.7.9", + "broccoli-asset-rev": "2.4.6", + "ember-ajax": "2.5.1", + "ember-cli": "2.6.3", + "ember-cli-app-version": "1.0.1", + "ember-cli-babel": "5.1.10", + "ember-cli-bootstrap-datepicker": "0.5.6", + "ember-cli-d3": "1.1.7", + "ember-cli-dependency-checker": "1.3.0", + "ember-cli-htmlbars": "1.1.0", + "ember-cli-htmlbars-inline-precompile": "0.3.5", + "ember-cli-inject-live-reload": "1.4.1", + "ember-cli-jshint": "1.0.5", + "ember-cli-moment-shim": "2.2.0", + "ember-cli-notifications": "4.0.9", + "ember-cli-qunit": "1.4.2", + "ember-cli-release": "0.2.9", + "ember-cli-sri": "2.1.1", + "ember-cli-uglify": "1.2.0", + "ember-collapsible-panel": "2.0.1", + "ember-d3": "0.2.0", + "ember-data": "2.7.0", + "ember-export-application-global": "1.0.5", + "ember-load-initializers": "0.5.1", + "ember-metrics": "0.6.4", + "ember-moment": "6.1.0", + "ember-resolver": "2.0.3", + "ember-tether": "0.3.1", + "ember-tooltips": "2.3.2", + "ember-welcome-page": "1.0.3", + "loader.js": "4.0.11" + } +} diff --git a/web/public/assets/images/loading.gif b/web/public/assets/images/loading.gif new file mode 100644 index 000000000..3c2f7c058 Binary files /dev/null and b/web/public/assets/images/loading.gif differ diff --git a/web/public/assets/images/logo.png b/web/public/assets/images/logo.png new file mode 100644 index 000000000..a7f7b9670 Binary files /dev/null and b/web/public/assets/images/logo.png differ diff --git a/web/public/assets/images/runtime.png b/web/public/assets/images/runtime.png new file mode 100644 index 000000000..6f98430d9 Binary files /dev/null and b/web/public/assets/images/runtime.png differ diff --git a/web/public/assets/images/usedmemory.png b/web/public/assets/images/usedmemory.png new file mode 100644 index 000000000..c55a084a6 Binary files /dev/null and b/web/public/assets/images/usedmemory.png differ diff --git a/web/public/assets/images/waittime.png b/web/public/assets/images/waittime.png new file mode 100644 index 000000000..fb862f961 Binary files /dev/null and b/web/public/assets/images/waittime.png differ diff --git a/web/public/assets/images/wastedmemory.png b/web/public/assets/images/wastedmemory.png new file mode 100644 index 000000000..627bf8d9c Binary files /dev/null and b/web/public/assets/images/wastedmemory.png differ diff --git a/web/testem.js b/web/testem.js new file mode 100644 index 000000000..728576674 --- /dev/null +++ b/web/testem.js @@ -0,0 +1,29 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
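Note on the build wiring: ember-cli-build.js above already pulls Bootstrap's CSS, JS, and glyphicon font into the vendor bundles with app.import, and bower.json declares bootstrap-datepicker as well. If its assets ever needed to be wired in directly (rather than through the ember-cli-bootstrap-datepicker addon listed in package.json), it would plausibly look like the sketch below; the dist paths are assumptions about that bower package's layout, not something this diff establishes.

```js
// ember-cli-build.js (sketch): wiring an additional bower asset the same way
// the bootstrap imports above are done. Paths are assumed, not verified here.
var EmberApp = require('ember-cli/lib/broccoli/ember-app');

module.exports = function (defaults) {
  var app = new EmberApp(defaults, { /* options as in the file above */ });

  // Existing bootstrap CSS/JS/font imports would stay as-is, then:
  app.import(app.bowerDirectory + '/bootstrap-datepicker/dist/css/bootstrap-datepicker3.css');
  app.import(app.bowerDirectory + '/bootstrap-datepicker/dist/js/bootstrap-datepicker.js');

  return app.toTree();
};
```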
+ */ + +/*jshint node:true*/ +module.exports = { + "framework": "qunit", + "test_page": "tests/index.html?hidepassed", + "disable_watching": true, + "launch_in_ci": [ + "PhantomJS" + ], + "launch_in_dev": [ + "PhantomJS", + "Chrome" + ] +}; diff --git a/web/tests/helpers/destroy-app.js b/web/tests/helpers/destroy-app.js new file mode 100644 index 000000000..7cac22ba1 --- /dev/null +++ b/web/tests/helpers/destroy-app.js @@ -0,0 +1,21 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; + +export default function destroyApp(application) { + Ember.run(application, 'destroy'); +} diff --git a/web/tests/helpers/module-for-acceptance.js b/web/tests/helpers/module-for-acceptance.js new file mode 100644 index 000000000..8ed3afcca --- /dev/null +++ b/web/tests/helpers/module-for-acceptance.js @@ -0,0 +1,39 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { module } from 'qunit'; +import Ember from 'ember'; +import startApp from '../helpers/start-app'; +import destroyApp from '../helpers/destroy-app'; + +const { RSVP: { Promise } } = Ember; + +export default function(name, options = {}) { + module(name, { + beforeEach() { + this.application = startApp(); + + if (options.beforeEach) { + return options.beforeEach.apply(this, arguments); + } + }, + + afterEach() { + let afterEach = options.afterEach && options.afterEach.apply(this, arguments); + return Promise.resolve(afterEach).then(() => destroyApp(this.application)); + } + }); +} diff --git a/web/tests/helpers/resolver.js b/web/tests/helpers/resolver.js new file mode 100644 index 000000000..56787ee09 --- /dev/null +++ b/web/tests/helpers/resolver.js @@ -0,0 +1,27 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
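The module-for-acceptance helper above composes startApp and destroyApp (defined just below) around each QUnit module, resolving the optional afterEach before tearing the app down so async hooks finish first. No acceptance tests are added in this diff; a hypothetical one using the helper would look roughly like this (the route and assertion are illustrative only):

```js
// tests/acceptance/dashboard-test.js (hypothetical)
import { test } from 'qunit';
import moduleForAcceptance from '../helpers/module-for-acceptance';

moduleForAcceptance('Acceptance | dashboard');

test('visiting /', function(assert) {
  visit('/');                  // global test helper injected by startApp()
  andThen(() => assert.equal(currentURL(), '/'));
});
```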
+ */ + +import Resolver from '../../resolver'; +import config from '../../config/environment'; + +const resolver = Resolver.create(); + +resolver.namespace = { + modulePrefix: config.modulePrefix, + podModulePrefix: config.podModulePrefix +}; + +export default resolver; diff --git a/web/tests/helpers/start-app.js b/web/tests/helpers/start-app.js new file mode 100644 index 000000000..aa284414e --- /dev/null +++ b/web/tests/helpers/start-app.js @@ -0,0 +1,34 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import Ember from 'ember'; +import Application from '../../app'; +import config from '../../config/environment'; + +export default function startApp(attrs) { + let application; + + let attributes = Ember.merge({}, config.APP); + attributes = Ember.merge(attributes, attrs); // use defaults, but you can override; + + Ember.run(() => { + application = Application.create(attributes); + application.setupForTesting(); + application.injectTestHelpers(); + }); + + return application; +} diff --git a/web/tests/index.html b/web/tests/index.html new file mode 100644 index 000000000..2b9c60c0a --- /dev/null +++ b/web/tests/index.html @@ -0,0 +1,50 @@ + + + + + + + + DrElephant Tests + + + + {{content-for "head"}} + {{content-for "test-head"}} + + + + + + {{content-for "head-footer"}} + {{content-for "test-head-footer"}} + + + {{content-for "body"}} + {{content-for "test-body"}} + + + + + + + + + {{content-for "body-footer"}} + {{content-for "test-body-footer"}} + + diff --git a/web/tests/integration/components/aggregated-metrics-test.js b/web/tests/integration/components/aggregated-metrics-test.js new file mode 100644 index 000000000..4e9b20630 --- /dev/null +++ b/web/tests/integration/components/aggregated-metrics-test.js @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
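The aggregated-metrics integration test just below asserts whole rendered strings such as '271.267GBHours1.00%00:16:401.00%'. Those figures follow from the display conversions exercised by the helper unit tests later in this diff, assuming resource values are MB-seconds and times are milliseconds; the first fixture works out as:

```js
// Worked arithmetic for the first aggregated-metrics fixture.
// Assumed units: resourceused/resourcewasted in MB-seconds, runtime/waittime in ms.
const resourceused = 1000000000, resourcewasted = 10000000;
const runtime = 1000000, waittime = 10000;

console.log((resourceused / (1024 * 3600)).toFixed(3));              // "271.267" GB Hours
console.log((100 * resourcewasted / resourceused).toFixed(2) + "%"); // "1.00%" wasted
console.log(new Date(runtime).toISOString().slice(11, 19));          // "00:16:40" runtime
console.log((100 * waittime / runtime).toFixed(2) + "%");            // "1.00%" wait
```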
+ */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('aggregated-metrics', 'Integration | Component | aggregated metrics', { + integration: true +}); + +test("Test for rendering the aggregated-metrics component", function(assert) { + + this.set('application', { resourceused: 1000000000, resourcewasted: 10000000, runtime: 1000000, waittime: 10000}); + this.render(hbs`{{aggregated-metrics application=application}}`); + + assert.equal(this.$().text().trim().replace(/ /g,'').split("\n").join(""), '271.267GBHours1.00%00:16:401.00%'); + + this.set('application', { resourceused: 2342342342342, resourcewasted: 23423423, runtime:32324320, waittime: 3000}); + this.render(hbs`{{aggregated-metrics application=application}}`); + + assert.equal(this.$().text().trim().replace(/ /g,'').split("\n").join(""), "635401.026GBHours0.00%08:58:440.01%"); +}); diff --git a/web/tests/integration/components/heuristic-details-list-test.js b/web/tests/integration/components/heuristic-details-list-test.js new file mode 100644 index 000000000..1eeea899d --- /dev/null +++ b/web/tests/integration/components/heuristic-details-list-test.js @@ -0,0 +1,74 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('heuristic-details-list', 'Integration | Component | heuristic details list', { + integration: true +}); + +test('Test for heuristic-details', function(assert) { + this.set("heuristic-details", [{ + name: "Mapper Data Skew", + severity: "None", + details: [ + { + name: "Group A", + value: "4 tasks @ 443 MB avg" + }, + { + name: "Group B", + value: "53 tasks @ 464 MB avg" + }, + { + name: "Number of tasks", + value: "57" + } + ] + }, + { + name: "Mapper GC", + severity: "None", + details: [ + { + name: "Avg task CPU time (ms)", + value: "27565" + }, + { + name: "Avg task GC time (ms)", + value: "885" + }, + { + name: "Avg task runtime (ms)", + value: "40890" + }, + { + name: "Number of tasks", + value: "57" + }, + { + name: "Task GC/CPU ratio", + value: "0.03210593143479049" + } + ] + }]); + + this.render(hbs`{{heuristic-details-list heuristic-details=heuristic-details}}`); + + assert.equal(this.$().text().trim().split("\n").join("").replace(/ /g, ''), 'MapperDataSkewSeverity:NoneGroupA4tasks@443MBavgGroupB53tasks@464MBavgNumberoftasks57MapperGCSeverity:NoneAvgtaskCPUtime(ms)27565AvgtaskGCtime(ms)885Avgtaskruntime(ms)40890Numberoftasks57TaskGC/CPUratio0.03210593143479049'); + +}); diff --git a/web/tests/integration/components/heuristics-summary-test.js b/web/tests/integration/components/heuristics-summary-test.js new file mode 100644 index 000000000..ba46d6629 --- /dev/null +++ b/web/tests/integration/components/heuristics-summary-test.js @@ -0,0 +1,75 @@ +/* + * Copyright 2016 LinkedIn Corp. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('heuristics-summary', 'Integration | Component | heuristics summary', { + integration: true +}); + +test('Tests for the rendering of heuristics-summary component', function (assert) { + this.set("heuristics", [ + { + name: "Mapper Data Skew", + severity: "None" + }, + { + name: "Mapper GC", + severity: "None" + }, + { + name: "Mapper Time", + severity: "Low" + }, + { + name: "Mapper Speed", + severity: "Low" + }, + { + name: "Mapper Spill", + severity: "Low" + }, + { + name: "Mapper Memory", + severity: "None" + }, + { + name: "Reducer Data Skew", + severity: "None" + }, + { + name: "Reducer GC", + severity: "Low" + }, + { + name: "Reducer Time", + severity: "Low" + }, + { + name: "Reducer Memory", + severity: "None" + }, + { + name: "Shuffle & Sort", + severity: "Low" + } + ]); + this.render(hbs`{{heuristics-summary heuristics=heuristics}}`); + + assert.equal(this.$().text().trim().split("\n").join("").replace(/ /g, ''), 'MapperDataSkewMapperGCMapperTimeMapperSpeedMapperSpillMapperMemoryReducerDataSkewReducerGCReducerTimeReducerMemoryShuffle&Sort'); + +}); diff --git a/web/tests/integration/components/jobs-severity-test.js b/web/tests/integration/components/jobs-severity-test.js new file mode 100644 index 000000000..f2df7b792 --- /dev/null +++ b/web/tests/integration/components/jobs-severity-test.js @@ -0,0 +1,44 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('jobs-severity', 'Integration | Component | jobs severity', { + integration: true +}); + +test('Tests for the job severity component', function(assert) { + + this.set("jobsseverity", [ + { + severity: "Severe", + count: 1 + }, + { + severity: "Moderate", + count: 2 + }, + { + severity: "Critical", + count: 1 + } + ]); + this.render(hbs`{{jobs-severity jobsseverity=jobsseverity}}`); + + assert.equal(this.$('#job_severities').text().trim().split("\n").join("").replace(/ /g, ''), '1Severe2Moderate1Critical'); + +}); diff --git a/web/tests/integration/components/single-application-test.js b/web/tests/integration/components/single-application-test.js new file mode 100644 index 000000000..0694f0335 --- /dev/null +++ b/web/tests/integration/components/single-application-test.js @@ -0,0 +1,121 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('single-application', 'Integration | Component | single application', { + integration: true +}); + +test('Test for single application component', function(assert) { + + this.set("application",{ + "id": "application_1", + "username": "user1", + "jobname": "job1", + "jobtype": "HadoopJava", + "starttime": 1475501548193, + "finishtime": 1475501987103, + "runtime": 438910, + "waittime": 21976, + "resourceused": 252366848, + "resourcewasted": 86381426, + "queue": "random", + "severity": "Severe", + "heuristicsummary": [ + { + "name": "Mapper Data Skew", + "severity": "None" + }, + { + "name": "Mapper GC", + "severity": "None" + }, + { + "name": "Mapper Time", + "severity": "Moderate" + }, + { + "name": "Mapper Speed", + "severity": "None" + }, + { + "name": "Mapper Spill", + "severity": "None" + }, + { + "name": "Mapper Memory", + "severity": "Severe" + }, + { + "name": "Reducer Data Skew", + "severity": "None" + }, + { + "name": "Reducer GC", + "severity": "None" + }, + { + "name": "Reducer Time", + "severity": "None" + }, + { + "name": "Reducer Memory", + "severity": "None" + }, + { + "name": "Shuffle & Sort", + "severity": "None" + } + ] + }); + + this.render(hbs`{{single-application application=application}}`); + + assert.equal(this.$('#app_summary_user').text().trim(), 'user1'); + assert.equal(this.$('#app_summary_id').text().trim().replace(/ /g,''), 'job1application_1'); + assert.equal(this.$('#app_summary_finishtime').text().trim(), 'Mon Oct 03 2016 19:09:47 GMT+0530 (IST)'); + assert.equal(this.$('#app_summary_aggregated_metrics').text().trim().split("\n").join("").replace(/ /g, ''), '68.459GBHours34.23%00:07:185.01%'); + assert.equal(this.$('#app_summary_heuristics_summary').text().trim().split("\n").join(""), 'Mapper Data Skew Mapper GC Mapper Time Mapper Speed Mapper Spill Mapper Memory Reducer Data Skew Reducer GC Reducer Time Reducer Memory 
Shuffle & Sort');
+
+
+  // Set jobname to null; the id should then show only the application name.
+  this.set("application", {
+    "id": "application_1",
+    "username": "user1",
+    "jobname": null,
+    "jobtype": "HadoopJava",
+    "starttime": 1475501548193,
+    "finishtime": 1475501987103,
+    "runtime": 438910,
+    "waittime": 21976,
+    "resourceused": 252366848,
+    "resourcewasted": 86381426,
+    "queue": "random",
+    "severity": "Severe",
+    "heuristicsummary": [
+      {
+        "name": "Mapper Data Skew",
+        "severity": "None"
+      }
+    ]
+  });
+
+  assert.equal(this.$('#app_summary_id').text().trim().replace(/ /g,''), 'application_1');
+
+});
+
diff --git a/web/tests/integration/components/single-flow-test.js b/web/tests/integration/components/single-flow-test.js
new file mode 100644
index 000000000..463975413
--- /dev/null
+++ b/web/tests/integration/components/single-flow-test.js
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import { moduleForComponent, test } from 'ember-qunit';
+import hbs from 'htmlbars-inline-precompile';
+
+moduleForComponent('single-flow', 'Integration | Component | single flow', {
+  integration: true
+});
+
+test('Test for single-flow component', function(assert) {
+  this.set("flow", {
+    id: "id1",
+    username: "user1",
+    finishtime: 332823048,
+    starttime: 332432432,
+    resourceused: 3423423,
+    resourcewasted: 234343,
+    runtime: 1899687,
+    waittime: 1099583,
+    jobsseverity: [
+      {
+        severity: "Severe",
+        count: 1
+      },
+      {
+        severity: "Critical",
+        count: 5
+      }
+    ]
+  });
+
+  this.render(hbs`{{single-flow flow=flow}}`);
+
+  assert.equal(this.$('#flow_summary_username').text().trim(), 'user1');
+  assert.equal(this.$('#flow_summary_finishtime').text().trim(), 'Mon Jan 05 1970 01:57:03 GMT+0530 (IST)');
+  assert.equal(this.$('#flow_summary_aggregated_metrics').text().trim().split("\n").join("").replace(/ /g, ''), '0.929GBHours6.85%00:31:3957.88%');
+  assert.equal(this.$('#flow_summary_jobs_severity').text().trim().split("\n").join("").replace(/ /g, ''), '1Severe5Critical');
+
+});
+
diff --git a/web/tests/integration/components/single-heuristic-detail-test.js b/web/tests/integration/components/single-heuristic-detail-test.js
new file mode 100644
index 000000000..1da84d698
--- /dev/null
+++ b/web/tests/integration/components/single-heuristic-detail-test.js
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
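One fragility worth noting in these component tests: the finishtime assertions hard-code strings ending in 'GMT+0530 (IST)', so the suite only passes when run in the IST timezone. Assuming the components format dates with Date#toString (which produces exactly this shape), a timezone-agnostic variant would derive the expected value from the fixture:

```js
// Sketch: compute the expected string in whatever timezone the tests run in,
// instead of hard-coding an IST-formatted literal.
const finishtime = 332823048; // from the single-flow fixture above
const expected = new Date(finishtime).toString();
// assert.equal(this.$('#flow_summary_finishtime').text().trim(), expected);
```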
+ */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('single-heuristic-detail', 'Integration | Component | single heuristic detail', { + integration: true +}); + +test('it renders', function(assert) { + + this.set("yarnappheuristicresult", { + name: "Mapper Data Skew", + severity: "None", + details: [ + { + name: "Group A", + value: "4 tasks @ 443 MB avg" + }, + { + name: "Group B", + value: "53 tasks @ 464 MB avg" + }, + { + name: "Number of tasks", + value: "57" + } + ] + }); + + this.render(hbs`{{single-heuristic-detail yarnappheuristicresult=yarnappheuristicresult}}`); + + assert.equal(this.$().text().trim().split("\n").join("").replace(/ /g, ''), 'MapperDataSkewSeverity:NoneGroupA4tasks@443MBavgGroupB53tasks@464MBavgNumberoftasks57'); + +}); diff --git a/web/tests/integration/components/single-job-test.js b/web/tests/integration/components/single-job-test.js new file mode 100644 index 000000000..02a2e6856 --- /dev/null +++ b/web/tests/integration/components/single-job-test.js @@ -0,0 +1,82 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('single-job', 'Integration | Component | single job', { + integration: true +}); + +test('test for single-job component', function (assert) { + this.set("job", { + id: "id1", + jobname: "sample_job", + username: "user1", + finishtime: 332823048, + startime: 332432432, + resourceused: 3423423, + resourcewasted: 234343, + runtime: 1899687, + waittime: 1099583, + tasksseverity: [ + { + severity: "Severe", + count: 1 + }, + { + severity: "Critical", + count: 5 + } + ] + }); + + this.render(hbs`{{single-job job=job}}`); + + assert.equal(this.$('#job_name').text().trim(), ': sample_job'); + assert.equal(this.$('#job_summary_username').text().trim(), 'user1'); + assert.equal(this.$('#job_summary_finishtime').text().trim(), 'Mon Jan 05 1970 01:57:03 GMT+0530 (IST)'); + assert.equal(this.$('#job_summary_aggregated_metrics').text().trim().split("\n").join("").replace(/ /g, ''), '0.929GBHours6.85%00:31:3957.88%'); + assert.equal(this.$('#job_summary_task_severity').text().trim().split("\n").join("").replace(/ /g, ''), '1Severe5Critical'); + + + this.set("job", { + id: "id2", + jobname: "sample_job_2", + username: "user2", + finishtime: 3328230, + startime: 33243, + resourceused: 3423423, + resourcewasted: 234343, + runtime: 1899687, + waittime: 1099583, + tasksseverity: [ + { + severity: "Critical", + count: 5 + } + ] + }); + + this.render(hbs`{{single-job job=job}}`); + assert.equal(this.$('#job_name').text().trim(), ': sample_job_2'); + assert.equal(this.$('#job_summary_username').text().trim(), 'user2'); + assert.equal(this.$('#job_summary_finishtime').text().trim(), "Thu Jan 01 1970 06:25:28 GMT+0530 (IST)"); + 
assert.equal(this.$('#job_summary_aggregated_metrics').text().trim().split("\n").join("").replace(/ /g, ''), '0.929GBHours6.85%00:31:3957.88%'); + assert.equal(this.$('#job_summary_task_severity').text().trim().split("\n").join("").replace(/ /g, ''), '5Critical'); + +}); + diff --git a/web/tests/integration/components/single-tab-test.js b/web/tests/integration/components/single-tab-test.js new file mode 100644 index 000000000..977a0a7eb --- /dev/null +++ b/web/tests/integration/components/single-tab-test.js @@ -0,0 +1,39 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('single-tab', 'Integration | Component | single tab', { + integration: true +}); + +test('Test for single-tab component', function(assert) { + + this.set("name","user1"); + this.render(hbs`{{single-tab name=name}}`); + assert.equal(this.$().text().trim(), 'user1'); + + this.set("name",""); + this.render(hbs`{{single-tab name=name}}`); + assert.equal(this.$().text().trim(), ''); + + + this.set("name","all"); + this.render(hbs`{{single-tab name=name}}`); + assert.equal(this.$().text().trim(), 'all'); + +}); diff --git a/web/tests/integration/components/tasks-severity-test.js b/web/tests/integration/components/tasks-severity-test.js new file mode 100644 index 000000000..44f1cd741 --- /dev/null +++ b/web/tests/integration/components/tasks-severity-test.js @@ -0,0 +1,48 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
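Nearly every assertion in these component tests normalizes the rendered text with the same trim/split/join/replace chain. A small shared helper (hypothetical, not part of this diff) would cut the repetition:

```js
// Hypothetical test helper: collapse newlines and spaces the way the inline
// assertions in this diff do.
function squish(text) {
  return text.trim().split("\n").join("").replace(/ /g, "");
}

// Example rewrite of an assertion from single-job-test above:
// assert.equal(squish(this.$('#job_summary_task_severity').text()), '1Severe5Critical');
```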
+ */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('tasks-severity', 'Integration | Component | tasks severity', { + integration: true +}); + +test('Test for task severity component', function(assert) { + + // set task severities here + this.set("job", { + tasksseverity: [ + { + severity: "Severe", + count: 1 + }, + { + severity: "Critical", + count: 5 + } + ] + }); + this.render(hbs`{{tasks-severity tasksseverity=job.tasksseverity}}`); + + assert.equal(this.$().text().trim().split("\n").join("").replace(/ /g,''), '1Severe5Critical'); + + this.set("job",{}) + this.render(hbs`{{tasks-severity job=job}}`); + assert.equal(this.$().text().split("\n").join(""),''); + +}); + diff --git a/web/tests/integration/components/user-tabs-test.js b/web/tests/integration/components/user-tabs-test.js new file mode 100644 index 000000000..a2d921fae --- /dev/null +++ b/web/tests/integration/components/user-tabs-test.js @@ -0,0 +1,42 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; +import Ember from 'ember'; + +moduleForComponent('user-tabs', 'Integration | Component | user tabs', { + integration: true +}); + +test('Test for user tabs component', function(assert) { + + // single tab with All text when no data is passed + this.render(hbs`{{user-tabs}}`); + assert.equal(this.$().text().trim(), 'All'); + + + // multiple tabs with id and tabname as the name of the user + var usernamesArray = Ember.A(["user1","user2","user3","user4"]); + this.set("users", usernamesArray); + this.render(hbs`{{user-tabs usernames=users}}`); + + assert.equal(this.$('#all').text().trim(),'All'); + assert.equal(this.$('#user1').text().trim(),'user1'); + assert.equal(this.$('#user2').text().trim(),'user2'); + assert.equal(this.$('#user3').text().trim(),'user3'); + assert.equal(this.$('#user4').text().trim(),'user4'); +}); diff --git a/web/tests/test-helper.js b/web/tests/test-helper.js new file mode 100644 index 000000000..e9c082d0f --- /dev/null +++ b/web/tests/test-helper.js @@ -0,0 +1,22 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ + +import resolver from './helpers/resolver'; +import { + setResolver +} from 'ember-qunit'; + +setResolver(resolver); diff --git a/web/tests/unit/helpers/eq-test.js b/web/tests/unit/helpers/eq-test.js new file mode 100644 index 000000000..eec1a0494 --- /dev/null +++ b/web/tests/unit/helpers/eq-test.js @@ -0,0 +1,37 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { eq } from 'dr-elephant/helpers/eq'; +import { module, test } from 'qunit'; + +module('Unit | Helper | eq'); + +test('Test for eq helper', function(assert) { + let result = eq([100,100]); + assert.ok(result); + result = eq([10,100]); + assert.ok(!result); + result = eq(["100","100"]); + assert.ok(result); + result = eq(["100","10"]); + assert.ok(!result); + result = eq(["100",100]); + assert.ok(!result); + result = eq([100.00,100.00]); + assert.ok(result); + result = eq([100.0,100.1]); + assert.ok(!result); +}); diff --git a/web/tests/unit/helpers/get-bootstrap-severity-code-test.js b/web/tests/unit/helpers/get-bootstrap-severity-code-test.js new file mode 100644 index 000000000..8b1eb2c5d --- /dev/null +++ b/web/tests/unit/helpers/get-bootstrap-severity-code-test.js @@ -0,0 +1,33 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { getBootstrapSeverityCode } from 'dr-elephant/helpers/get-bootstrap-severity-code'; +import { module, test } from 'qunit'; + +module('Unit | Helper | get bootstrap severity code'); + +test('Test for getBootstrapSeverityCode helper', function(assert) { + let result = getBootstrapSeverityCode(["critical"]); + assert.equal("danger",result); + result = getBootstrapSeverityCode(["severe"]); + assert.equal("severe",result); + result = getBootstrapSeverityCode(["moderate"]); + assert.equal("warning",result); + result = getBootstrapSeverityCode(["low"]); + assert.equal("success",result); + result = getBootstrapSeverityCode(["none"]); + assert.equal("success",result); +}); diff --git a/web/tests/unit/helpers/get-color-for-severity-test.js b/web/tests/unit/helpers/get-color-for-severity-test.js new file mode 100644 index 000000000..2afee0573 --- /dev/null +++ b/web/tests/unit/helpers/get-color-for-severity-test.js @@ -0,0 +1,34 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { getColorForSeverity } from 'dr-elephant/helpers/get-color-for-severity'; +import { module, test } from 'qunit'; + +module('Unit | Helper | get color for severity'); + +test('Test for getColorForSeverity helper', function(assert) { + let result = getColorForSeverity(["critical"]); + assert.equal(result,"#D9534F"); + result = getColorForSeverity(["severe"]); + assert.equal(result,"#E4804E"); + result = getColorForSeverity(["moderate"]); + assert.equal(result,"#F0AD4E"); + result = getColorForSeverity(["low"]); + assert.equal(result,"#5CB85C"); + result = getColorForSeverity(["none"]); + assert.equal(result,"#5CB85C"); +}); + diff --git a/web/tests/unit/helpers/get-date-test.js b/web/tests/unit/helpers/get-date-test.js new file mode 100644 index 000000000..b6a7359f9 --- /dev/null +++ b/web/tests/unit/helpers/get-date-test.js @@ -0,0 +1,25 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { getDate } from 'dr-elephant/helpers/get-date'; +import { module, test } from 'qunit'; + +module('Unit | Helper | get date'); + +test('test for getDate helper', function(assert) { + let result = getDate([42]); + assert.ok(result); +}); diff --git a/web/tests/unit/helpers/get-duration-breakdown-test.js b/web/tests/unit/helpers/get-duration-breakdown-test.js new file mode 100644 index 000000000..684a5620a --- /dev/null +++ b/web/tests/unit/helpers/get-duration-breakdown-test.js @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
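The two severity-mapping helpers above are pinned down only through their outputs: getBootstrapSeverityCode maps severities to Bootstrap contextual classes, and getColorForSeverity maps them to hex colors, with 'low' and 'none' sharing the green value. Implementations consistent with every assertion could be plain lookup tables; this is a sketch, and the helpers actually shipped in this change may differ.

```js
// Sketches matching the unit-test expectations above, not the shipped helper bodies.
const BOOTSTRAP_CLASS = {
  critical: "danger",
  severe: "severe",
  moderate: "warning",
  low: "success",
  none: "success"
};

const SEVERITY_COLOR = {
  critical: "#D9534F",
  severe: "#E4804E",
  moderate: "#F0AD4E",
  low: "#5CB85C",
  none: "#5CB85C"
};

export function getBootstrapSeverityCode([severity]) {
  return BOOTSTRAP_CLASS[severity];
}

export function getColorForSeverity([severity]) {
  return SEVERITY_COLOR[severity];
}
```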
+ */ + +import { getDurationBreakdown } from 'dr-elephant/helpers/get-duration-breakdown'; +import { module, test } from 'qunit'; + +module('Unit | Helper | get duration breakdown'); + +test('Test for getDurationBreakdown helper', function(assert) { + let result = getDurationBreakdown([10000000]); + assert.equal(result,"02:46:40"); + result = getDurationBreakdown([0]); + assert.equal(result,"00:00:00"); + result = getDurationBreakdown([1]); + assert.equal(result,"00:00:00"); + result = getDurationBreakdown([1000]); + assert.equal(result,"00:00:01"); + result = getDurationBreakdown([3600000]); + assert.equal(result,"01:00:00"); + result = getDurationBreakdown([60000]); + assert.equal(result,"00:01:00"); +}); diff --git a/web/tests/unit/helpers/get-percentage-test.js b/web/tests/unit/helpers/get-percentage-test.js new file mode 100644 index 000000000..db097537a --- /dev/null +++ b/web/tests/unit/helpers/get-percentage-test.js @@ -0,0 +1,37 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +import { getPercentage } from 'dr-elephant/helpers/get-percentage'; +import { module, test } from 'qunit'; + +module('Unit | Helper | get percentage'); + +test('Test for getPercentage helper', function(assert) { + let result = getPercentage([5,200]); + assert.equal(result,"2.50%"); + result = getPercentage([50,200]); + assert.equal(result,"25.00%"); + result = getPercentage([0,100]); + assert.equal(result,"0.00%"); + result = getPercentage([100,100]); + assert.equal(result,"100.00%"); + result = getPercentage([0,0]); + assert.equal(result,"0%"); + result = getPercentage([1,20]); + assert.equal(result,"5.00%"); + result = getPercentage([100,20]); + assert.equal(result,"500.00%"); +}); diff --git a/web/tests/unit/helpers/get-resource-in-gbhours-test.js b/web/tests/unit/helpers/get-resource-in-gbhours-test.js new file mode 100644 index 000000000..07ee91aa7 --- /dev/null +++ b/web/tests/unit/helpers/get-resource-in-gbhours-test.js @@ -0,0 +1,35 @@ +/* + * Copyright 2016 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
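Two more helpers are characterized by the tests above: getDurationBreakdown turns milliseconds into zero-padded HH:MM:SS, truncating sub-second remainders (1 ms gives "00:00:00"), and getPercentage formats value/total to two decimals while guarding the 0/0 case with a bare "0%". Sketches consistent with those cases (the shipped code may differ):

```js
// Sketches consistent with the assertions above.
export function getDurationBreakdown([millis]) {
  var totalSeconds = Math.floor(millis / 1000); // truncate sub-second remainders
  var pad = function (n) { return (n < 10 ? "0" : "") + n; };
  var hours = Math.floor(totalSeconds / 3600);
  var minutes = Math.floor((totalSeconds % 3600) / 60);
  return pad(hours) + ":" + pad(minutes) + ":" + pad(totalSeconds % 60);
}

export function getPercentage([value, total]) {
  if (total === 0) {
    return "0%"; // matches getPercentage([0, 0]) === "0%"
  }
  return (100 * value / total).toFixed(2) + "%";
}
```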
+ */
+
+import { getResourceInGBHours } from 'dr-elephant/helpers/get-resource-in-gbhours';
+import { module, test } from 'qunit';
+
+module('Unit | Helper | get resource in gbhours');
+
+test('Test for getResourceInGBHours helper', function(assert) {
+  let result = getResourceInGBHours([100001010]);
+  assert.equal(result,"27.127 GB Hours");
+  result = getResourceInGBHours([0]);
+  assert.equal(result,"0 GB Hours");
+  result = getResourceInGBHours([100]);
+  assert.equal(result,"0 GB Hours");
+  result = getResourceInGBHours([-1]);
+  assert.equal(result,"0 GB Hours");
+  result = getResourceInGBHours([33]);
+  assert.equal(result,"0 GB Hours");
+  result = getResourceInGBHours([3080328048302480]);
+  assert.equal(result,"835592461.020 GB Hours");
+});
diff --git a/web/tests/unit/helpers/gt-test.js b/web/tests/unit/helpers/gt-test.js
new file mode 100644
index 000000000..33ac960b6
--- /dev/null
+++ b/web/tests/unit/helpers/gt-test.js
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import { gt } from 'dr-elephant/helpers/gt';
+import { module, test } from 'qunit';
+
+module('Unit | Helper | gt');
+
+test('Test for gt helper', function(assert) {
+  let result = gt([42,30]);
+  assert.ok(result);
+  result = gt([30,42]);
+  assert.ok(!result);
+  result = gt([-1,30]);
+  assert.ok(!result);
+  result = gt([30,-1]);
+  assert.ok(result);
+  result = gt([-1,-5]);
+  assert.ok(result);
+  result = gt([-9,-5]);
+  assert.ok(!result);
+});
+
diff --git a/web/tests/unit/helpers/not-empty-test.js b/web/tests/unit/helpers/not-empty-test.js
new file mode 100644
index 000000000..7706f2d1e
--- /dev/null
+++ b/web/tests/unit/helpers/not-empty-test.js
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import { notEmpty } from 'dr-elephant/helpers/not-empty';
+import { module, test } from 'qunit';
+
+module('Unit | Helper | not empty');
+
+test('Test for notEmpty helper', function(assert) {
+  let result = notEmpty(["this is not empty"]);
+  assert.ok(result);
+  result = notEmpty([""]);
+  assert.ok(!result);
+});
diff --git a/web/tests/unit/helpers/url-encode-test.js b/web/tests/unit/helpers/url-encode-test.js
new file mode 100644
index 000000000..0bccde722
--- /dev/null
+++ b/web/tests/unit/helpers/url-encode-test.js
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
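The getResourceInGBHours cases above imply the input is MB-seconds: 100001010 / (1024 * 3600) is roughly 27.127, and 3080328048302480 / (1024 * 3600) is roughly 835592461.020, while negative or negligible inputs collapse to a bare "0 GB Hours". A sketch consistent with every assertion (again, not necessarily the shipped body):

```js
// Sketch: 1 GB Hour = 1024 MB * 3600 s. Non-positive inputs, or values that
// would render as "0.000", collapse to the bare "0 GB Hours" the tests expect.
export function getResourceInGBHours([mbSeconds]) {
  var gbHours = mbSeconds / (1024 * 3600);
  if (!(gbHours > 0) || gbHours.toFixed(3) === "0.000") {
    return "0 GB Hours";
  }
  return gbHours.toFixed(3) + " GB Hours";
}
```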
+ */
+
+import { urlEncode } from 'dr-elephant/helpers/url-encode';
+import { module, test } from 'qunit';
+
+module('Unit | Helper | url encode');
+
+test('Test for urlEncode helper', function(assert) {
+  let result = urlEncode(["http://localhost:8090?flowid=abc&page=5&heuristic=Mapper Spill Heuristic"]);
+  assert.equal(result,"http%3A%2F%2Flocalhost%3A8090%3Fflowid%3Dabc%26page%3D5%26heuristic%3DMapper%20Spill%20Heuristic");
+});
diff --git a/web/tests/unit/utils/scheduler-test.js b/web/tests/unit/utils/scheduler-test.js
new file mode 100644
index 000000000..99fc46e9d
--- /dev/null
+++ b/web/tests/unit/utils/scheduler-test.js
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2016 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import Scheduler from 'dr-elephant/utils/scheduler';
+import { module, test } from 'qunit';
+
+module('Unit | Utility | scheduler');
+
+test('Test scheduler utility', function(assert) {
+  var scheduler = new Scheduler();
+  var flowdefid = "https://localhost:8443/manager?project=project1&flow=flow1";
+  var flowexecid = "https://localhost:8443/executor?execid=1342787";
+  var schedulerName = "azkaban";
+  assert.equal(scheduler.getFlowName(flowexecid, flowdefid, schedulerName), "azkaban: project1: flow1: 1342787");
+
+  schedulerName = "oozie";
+  assert.equal(scheduler.getFlowName(flowexecid, flowdefid, schedulerName), flowexecid);
+
+  flowdefid = "https://x:y:x";
+  assert.equal(scheduler.getFlowName(flowexecid, flowdefid, schedulerName), flowexecid);
+
+  flowexecid = "https://x:y:z";
+  assert.equal(scheduler.getFlowName(flowexecid, flowdefid, schedulerName), flowexecid);
+});
+
+test('Test job display name', function(assert) {
+  var scheduler = new Scheduler();
+  var jobdefid = "https://localhost:8443/manager?project=project1&flow=flow1&job=job1";
+  var jobexecid = "https://localhost:8443/executor?execid=1&job=job1&attempt=0";
+  var schedulerName = "azkaban";
+  assert.equal(scheduler.getJobDisplayName(jobexecid, jobdefid, schedulerName), "job1: 1");
+
+  schedulerName = "oozie";
+  assert.equal(scheduler.getJobDisplayName(jobexecid, jobdefid, schedulerName), jobexecid);
+
+  jobdefid = "https://x:y:x";
+  assert.equal(scheduler.getJobDisplayName(jobexecid, jobdefid, schedulerName), jobexecid);
+
+  jobexecid = "https://x:y:z";
+  assert.equal(scheduler.getJobDisplayName(jobexecid, jobdefid, schedulerName), jobexecid);
+});
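The scheduler unit tests close the loop on web/app/utils/scheduler.js: display names are derived purely by parsing the Azkaban URLs, and anything unparseable, or any scheduler not in the Schedulers map, falls back to the raw execution id. Usage mirrors the tests:

```js
import Scheduler from 'dr-elephant/utils/scheduler';

// The tests instantiate with `new`; Scheduler.create() is the more conventional
// call for an Ember.Object subclass. Values below come from the test above.
var scheduler = new Scheduler();

scheduler.getFlowName(
  "https://localhost:8443/executor?execid=1342787",             // flow execution id
  "https://localhost:8443/manager?project=project1&flow=flow1", // flow definition id
  "azkaban"
); // => "azkaban: project1: flow1: 1342787"

// Supporting another scheduler means adding an entry to the Schedulers map and a
// matching case in each switch; unknown names keep returning the raw id.
```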