
[pig] Add test for whole log parsing

Romain Rigaux 12 years ago
parent
commit
888d9cb615
2 changed files with 165 additions and 8 deletions
  1. apps/pig/src/pig/api.py (+4 / -7)
  2. apps/pig/src/pig/tests.py (+161 / -1)

+ 4 - 7
apps/pig/src/pig/api.py

@@ -14,12 +14,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from filebrowser.views import location_to_url
 
-try:
-  import json
-except ImportError:
-  import simplejson as json
+import json
 import logging
 import re
 import time
@@ -28,11 +24,12 @@ from django.core.urlresolvers import reverse
 from django.utils.html import escape
 from django.utils.translation import ugettext as _
 
-from jobbrowser.views import job_single_logs
 from desktop.lib.view_util import format_duration_in_millis
+from filebrowser.views import location_to_url
+from jobbrowser.views import job_single_logs
+from liboozie.oozie_api import get_oozie
 from oozie.models import Workflow, Pig
 from oozie.views.editor import _submit_workflow
-from liboozie.oozie_api import get_oozie
 
 LOG = logging.getLogger(__name__)
 

+ 161 - 1
apps/pig/src/pig/tests.py

@@ -33,7 +33,7 @@ from liboozie.oozie_api_test import OozieServerProvider
 from oozie.tests import OozieBase
 
 from pig.models import create_or_update_script, PigScript
-from pig.api import OozieApi
+from pig.api import OozieApi, get
 
 
 class TestPigBase(object):
@@ -143,6 +143,29 @@ class TestMock(TestPigBase):
     # Update
     self.c.post(reverse('pig:save'), data=attrs, follow=True)
 
+  def parse_oozie_logs(self):
+    api = get(None, self.user)
+
+    assert_equal(
+'''Run pig script using PigRunner.run() for Pig version 0.8+
+  Apache Pig version 0.11.0-cdh4.4.0-SNAPSHOT (rexported)
+  compiled Jun 30 2013, 03:40:22
+
+  Run pig script using PigRunner.run() for Pig version 0.8+
+  2013-10-09 17:30:39,709 [main] INFO  org.apache.pig.Main  - Apache Pig version 0.11.0-cdh4.4.0-SNAPSHOT (rexported) compiled Jun 30 2013, 03:40:22
+  2013-10-09 17:30:39,709 [main] INFO  org.apache.pig.Main  - Apache Pig version 0.11.0-cdh4.4.0-SNAPSHOT (rexported) compiled Jun 30 2013, 03:40:22
+  2013-10-09 17:30:39,710 [main] INFO  org.apache.pig.Main  - Logging error messages to: /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/pig-job_1381360805876_0001.log
+  2013-10-09 17:30:39,710 [main] INFO  org.apache.pig.Main  - Logging error messages to: /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/pig-job_1381360805876_0001.log
+  2013-10-09 17:30:39,739 [main] WARN  org.apache.hadoop.conf.Configuration  - dfs.df.interval is deprecated. Instead, use fs.df.interval
+  2013-10-09 17:30:39,739 [main] WARN  org.apache.hadoop.conf.Configuration  - mapred.task.tracker.http.address is deprecated. Instead, use mapreduce.tasktracker.http.address
+  2013-10-09 17:30:39,833 [main] INFO  org.apache.pig.backend.hadoop.executionengine.HExecutionEngine  - Connecting to map-reduce job tracker at: localhost:8032
+  hdfs://localhost:8020/user/romain/.Trash  <dir>
+  hdfs://localhost:8020/user/romain/examples  <dir>
+  hdfs://localhost:8020/user/romain/tweets  <dir>
+  hdfs://localhost:8020/user/romain/wordcount.jar<r 1>  3165
+  hdfs://localhost:8020/user/romain/words  <dir>
+  hdfs://localhost:8020/user/romain/yelp  <dir>''', api._match_logs({'logs': [None, OOZIE_LOGS]}))
+
 
 class TestWithHadoop(OozieBase):
 
@@ -263,3 +286,140 @@ class TestWithHadoop(OozieBase):
 
     self.c.post(reverse('pig:stop'), data={'id': script.id}, follow=True)
     self.wait_until_completion(json.loads(submit_response.content)['id'], expected_status='KILLED')
+
+
+OOZIE_LOGS ="""  Log Type: stdout
+
+  Log Length: 117627
+
+  Oozie Launcher starts
+
+  Heart beat
+  Starting the execution of prepare actions
+  Completed the execution of prepare actions successfully
+
+  Files in current dir:/var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/.
+  ======================
+  File: commons-cli-1.2.jar
+  File: antlr-runtime-3.4.jar
+  File: stringtemplate-3.2.1.jar
+  File: script.pig
+  File: jyson-1.0.2.jar
+
+  Oozie Java/Map-Reduce/Pig action launcher-job configuration
+  =================================================================
+  Workflow job id   : 0000000-131009162028638-oozie-oozi-W
+  Workflow action id: 0000000-131009162028638-oozie-oozi-W@pig
+
+  Classpath         :
+  ------------------------
+  /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002
+  /etc/hadoop/conf
+  /usr/lib/hadoop/hadoop-nfs-2.1.0-cdh5.0.0-SNAPSHOT.jar
+  /usr/lib/hadoop/hadoop-common-2.1.0-cdh5.0.0-SNAPSHOT.jar
+  /usr/lib/hadoop/hadoop-auth-2.1.0-cdh5.0.0-SNAPSHOT.jar
+  /usr/lib/hadoop/hadoop-common.jar
+  /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/jyson-1.0.2.jar
+  ------------------------
+
+  Main class        : org.apache.oozie.action.hadoop.PigMain
+
+  Maximum output    : 2048
+
+  Arguments         :
+
+  Java System Properties:
+  ------------------------
+  #
+  #Wed Oct 09 17:30:39 PDT 2013
+  java.runtime.name=Java(TM) SE Runtime Environment
+  awt.toolkit=sun.awt.X11.XToolkit
+  java.vm.info=mixed mode
+  java.version=1.7.0_40
+  java.ext.dirs=/usr/lib/jvm/java-7-oracle/jre/lib/ext\:/usr/java/packages/lib/ext
+  sun.boot.class.path=/usr/lib/jvm/java-7-oracle/jre/lib/resources.jar\:/usr/lib/jvm/java-7-oracle/jre/lib/rt.jar\:/usr/lib/jvm/java-7-oracle/jre/lib/sunrsasign.jar\:/usr/lib/jvm/java-7-oracle/jre/lib/jsse.jar\:/usr/lib/jvm/java-7-oracle/jre/lib/jce.jar\:/usr/lib/jvm/java-7-oracle/jre/lib/charsets.jar\:/usr/lib/jvm/java-7-oracle/jre/lib/jfr.jar\:/usr/lib/jvm/java-7-oracle/jre/classes
+  java.vendor=Oracle Corporation
+  file.separator=/
+  oozie.launcher.job.id=job_1381360805876_0001
+  oozie.action.stats.properties=/var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/stats.properties
+  java.vendor.url.bug=http\://bugreport.sun.com/bugreport/
+  sun.io.unicode.encoding=UnicodeLittle
+  sun.cpu.endian=little
+  sun.cpu.isalist=
+  ------------------------
+
+  =================================================================
+
+  >>> Invoking Main class now >>>
+
+
+  Oozie Pig action configuration
+  =================================================================
+  ------------------------
+  Setting env property for mapreduce.job.credentials.binary to:/var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/container_tokens
+  ------------------------
+  pig.properties:
+  --------------------
+  mapreduce.job.ubertask.enable : false
+  yarn.resourcemanager.max-completed-applications : 10000
+  yarn.resourcemanager.delayed.delegation-token.removal-interval-ms : 30000
+  yarn.nodemanager.delete.debug-delay-sec : 0
+  hadoop.ssl.require.client.cert : false
+  dfs.datanode.max.transfer.threads : 4096
+  --------------------
+
+  Pig script [script.pig] content:
+  ------------------------
+  ls
+  ------------------------
+
+  Current (local) dir = /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002
+  Pig command arguments :
+  -file
+  script.pig
+  -log4jconf
+  /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/piglog4j.properties
+  -logfile
+  pig-job_1381360805876_0001.log
+  =================================================================
+
+  >>> Invoking Pig command line now >>>
+
+
+  Run pig script using PigRunner.run() for Pig version 0.8+
+  Apache Pig version 0.11.0-cdh4.4.0-SNAPSHOT (rexported)
+  compiled Jun 30 2013, 03:40:22
+
+  Run pig script using PigRunner.run() for Pig version 0.8+
+  2013-10-09 17:30:39,709 [main] INFO  org.apache.pig.Main  - Apache Pig version 0.11.0-cdh4.4.0-SNAPSHOT (rexported) compiled Jun 30 2013, 03:40:22
+  2013-10-09 17:30:39,709 [main] INFO  org.apache.pig.Main  - Apache Pig version 0.11.0-cdh4.4.0-SNAPSHOT (rexported) compiled Jun 30 2013, 03:40:22
+  2013-10-09 17:30:39,710 [main] INFO  org.apache.pig.Main  - Logging error messages to: /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/pig-job_1381360805876_0001.log
+  2013-10-09 17:30:39,710 [main] INFO  org.apache.pig.Main  - Logging error messages to: /var/lib/hadoop-yarn/cache/yarn/nm-local-dir/usercache/romain/appcache/application_1381360805876_0001/container_1381360805876_0001_01_000002/pig-job_1381360805876_0001.log
+  2013-10-09 17:30:39,739 [main] WARN  org.apache.hadoop.conf.Configuration  - dfs.df.interval is deprecated. Instead, use fs.df.interval
+  2013-10-09 17:30:39,739 [main] WARN  org.apache.hadoop.conf.Configuration  - mapred.task.tracker.http.address is deprecated. Instead, use mapreduce.tasktracker.http.address
+  2013-10-09 17:30:39,833 [main] INFO  org.apache.pig.backend.hadoop.executionengine.HExecutionEngine  - Connecting to map-reduce job tracker at: localhost:8032
+  hdfs://localhost:8020/user/romain/.Trash  <dir>
+  hdfs://localhost:8020/user/romain/examples  <dir>
+  hdfs://localhost:8020/user/romain/tweets  <dir>
+  hdfs://localhost:8020/user/romain/wordcount.jar<r 1>  3165
+  hdfs://localhost:8020/user/romain/words  <dir>
+  hdfs://localhost:8020/user/romain/yelp  <dir>
+
+  <<< Invocation of Pig command completed <<<
+
+  Hadoop Job IDs executed by Pig:
+
+
+  <<< Invocation of Main class completed <<<
+
+
+  Oozie Launcher ends
+
+  2013-10-09 17:30:40,009 [main] INFO  org.apache.hadoop.mapred.Task  - Task:attempt_1381360805876_0001_m_000000_0 is done. And is in the process of committing
+  2013-10-09 17:30:40,087 [main] INFO  org.apache.hadoop.mapred.Task  - Task attempt_1381360805876_0001_m_000000_0 is allowed to commit now
+  2013-10-09 17:30:40,094 [main] INFO  org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter  - Saved output of task 'attempt_1381360805876_0001_m_000000_0' to hdfs://localhost:8020/user/romain/oozie-oozi/0000000-131009162028638-oozie-oozi-W/pig--pig/output/_temporary/1/task_1381360805876_0001_m_000000
+  2013-10-09 17:30:40,153 [main] INFO  org.apache.hadoop.mapred.Task  - Task 'attempt_1381360805876_0001_m_000000_0' done.
+  2013-10-09 17:30:40,254 [main] INFO  org.apache.hadoop.metrics2.impl.MetricsSystemImpl  - Stopping MapTask metrics system...
+  2013-10-09 17:30:40,257 [main] INFO  org.apache.hadoop.metrics2.impl.MetricsSystemImpl  - MapTask metrics system stopped.
+  2013-10-09 17:30:40,257 [main] INFO  org.apache.hadoop.metrics2.impl.MetricsSystemImpl  - MapTask metrics system shutdown complete.
+"""