vvcephei avatar vvcephei committed a751d33

adding thesis stuff

Comments (0)

Files changed (7)

bin/parse_updoutput.py

+#!/usr/bin/python
+import re, sys
+
DEBUG = False            # set True for verbose tracing of parsed values
NaN = float('nan')       # placeholder for statistics never seen in the input
# Section headers look like "STAT: <fold> <prep> <exp>".
statRE = re.compile(r"^STAT: (\S+) (\S+) (.*)")
# Current parser state: which table cell the following stat lines belong to.
fold = prep = exp = n = acc = fpos = fneg = None
skip = True              # ignore lines until the first STAT header
folds = set()            # every fold name encountered
tables = {}              # tables[prep][exp][stat][fold] -> float
+
def debug(s):
  """Print ``s`` prefixed with '>' when DEBUG tracing is enabled."""
  if not DEBUG:
    return
  print('>' + str(s))
+
# Scan stdin for STAT section headers and the statistic lines that follow.
# A "STAT: <fold> <prep> <exp>" line selects the current table cell; the
# N / Accuracy / positive / negative lines after it fill in that cell.
# A line starting with "Per" ends the section (skip until the next STAT).
for line in sys.stdin:
  line = line.rstrip()
  m = statRE.match(line)
  if m:
    (fold, prep, exp) = m.groups()
    folds.add(fold)
    # Pre-create tables[prep][exp][stat][fold] with NaN placeholders so
    # statistics that never appear are still visible in the output.
    tables.setdefault(prep, {}).setdefault(exp, {})
    for stat in ("n", "acc", "fpos", "fneg"):
      tables[prep][exp].setdefault(stat, {}).setdefault(fold, NaN)
    skip = False
    continue
  if re.match(r"^Per", line):
    skip = True
    continue
  if skip:
    continue
  m = re.match(r"^\s+N\s+(\d+)", line)
  if m:
    (n,) = m.groups()
    tables[prep][exp]['n'][fold] = float(n)
    debug(tables[prep][exp]['n'])
    continue
  # Fixed: original pattern was r"\d+.\d+" -- the unescaped '.' matched any
  # character; escape it so only a literal decimal point is accepted.
  m = re.match(r"^\s+Accuracy\s+(\d+\.\d+)", line)
  if m:
    (acc,) = m.groups()
    tables[prep][exp]['acc'][fold] = float(acc)
    debug(tables[prep][exp]['acc'])
    continue
  m = re.match(r"^\s+positive .* (\d+\.\d+)$", line)
  if m:
    (fpos,) = m.groups()
    tables[prep][exp]['fpos'][fold] = float(fpos)
    debug(tables[prep][exp]['fpos'])
    continue
  m = re.match(r"^\s+negative .* (\d+\.\d+)$", line)
  if m:
    (fneg,) = m.groups()
    tables[prep][exp]['fneg'][fold] = float(fneg)
    debug(tables[prep][exp]['fneg'])
    continue
+
# Collect and sort the axis values observed while parsing: preprocessing
# pipelines, experiment names, statistic names, and folds.
prep_set = set(tables)
exp_set = set()
val_set = set()
fold_set = folds
for per_prep in tables.values():
  for exp_name, stats in per_prep.items():
    exp_set.add(exp_name)
    val_set.update(stats)
prep_set = sorted(prep_set)
exp_set = sorted(exp_set)
val_set = sorted(val_set)
fold_set = sorted(fold_set)
+
sums = {}  # sums[prep][exp][val]: running totals across folds, for averages

def print_csv_itable(fold, val):
  """Build one CSV sub-table for statistic ``val`` at fold ``fold``.

  Returns a list of row strings: a header row naming the experiments,
  then one row per preprocessing pipeline.  Side effect: accumulates each
  present cell into the module-level ``sums`` so averages can be printed
  later by print_csv_itable_sums.
  """
  lines = [''] * (1 + len(prep_set))
  lines[0] += '"(%s)",' % (val)
  for i, prep in enumerate(prep_set):
    lines[1 + i] += '"%s",' % str(prep)

  for exp in exp_set:
    lines[0] += '"%s",' % str(exp)
    for i, prep in enumerate(prep_set):
      cell = sums.setdefault(prep, {}).setdefault(exp, {})
      cell.setdefault(val, 0.0)
      if prep in tables and exp in tables[prep] and val in tables[prep][exp] and fold in tables[prep][exp][val]:
        value = tables[prep][exp][val][fold]
        cell[val] += value
        if val == 'n':
          # Counts print as integers, but int(NaN) raises ValueError for
          # cells that were never filled in -- fall back to float format.
          # (Narrowed from a bare except, which hid unrelated errors.)
          try:
            lines[1 + i] += '%d,' % int(value)
          except (ValueError, OverflowError):
            lines[1 + i] += '%.2f,' % (value)
        else:
          lines[1 + i] += '%.2f,' % (value)
      else:
        lines[1 + i] += ','
  return lines
+
def print_csv_itable_sums(val):
  """Render the across-fold averages of statistic ``val`` as CSV rows.

  Same shape as print_csv_itable: a header row of experiment names, then
  one row per preprocessing pipeline, each cell being sum/num_folds.
  """
  n_folds = len(fold_set)
  header = '"(%s)",' % (val)
  rows = ['"%s",' % str(p) for p in prep_set]
  for exp in exp_set:
    header += '"%s",' % str(exp)
    for i, prep in enumerate(prep_set):
      rows[i] += '%.2f,' % (sums[prep][exp][val] / n_folds)
  return [header] + rows
+
def print_csv():
  """Assemble the report: one section per fold plus a final AVERAGE section.

  Each section places the n / acc / fpos / fneg sub-tables side by side,
  with an empty CSV column between adjacent sub-tables.
  """
  def glue(subtables):
    # Join corresponding rows of each sub-table, one empty column between.
    return [','.join(row) for row in zip(*subtables)]

  stats = ('n', 'acc', 'fpos', 'fneg')
  sections = []
  for fold in fold_set:
    sections.append((fold, glue([print_csv_itable(fold, s) for s in stats])))
  sections.append(("AVERAGE", glue([print_csv_itable_sums(s) for s in stats])))
  return sections
+
# Emit the report: each section is its name followed by its CSV rows.
for name, lines in print_csv():
  print("\n" + name)
  for line in lines:
    print(line)

# Removed: an unconditional sys.exit() followed by an unreachable
# pprint dump of `tables` (dead code left over from debugging).
+

bin/run-hcr-lda.sh

#!/bin/bash
# Run n-fold cross-validated LDA sentiment experiments on the HCR dataset:
# a Maxent classifier over discriminant-LDA topics, and a KNN classifier
# for several neighbourhood sizes.  Each configuration runs on both the
# bigram and no-bigram preprocessed data under $UPDOWN_DIR/out/data.
# Assumes `updown` is on PATH and $UPDOWN_DIR is set.

iter=1000   # Gibbs sampling iterations
beta=0.01   # topic-word smoothing
nFold=3     # cross-validation folds
for k in 3 10 100; do \
  # Common LDA heuristic: alpha = 50 / numTopics.
  alpha=$( echo - | awk "{ print 50/$k }" )
  
  echo Maxent k=$k
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentHcrBi -g $UPDOWN_DIR/out/data/hcr.train_dev.unlabeled.bigrams.updown 
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentHcr -g $UPDOWN_DIR/out/data/hcr.train_dev.unlabeled.noBigrams.updown 
  
  for KNN in 5 11 21; do \
    # Log the configuration and a timestamp before each (long) KNN run.
    echo iter=$iter alpha=$alpha beta=$beta k=$k KNN=$KNN name=Lda"$k"Knn"$KNN"Hcr
    date
    
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"HcrBi -g $UPDOWN_DIR/out/data/hcr.train_dev.unlabeled.bigrams.updown 
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"Hcr -g $UPDOWN_DIR/out/data/hcr.train_dev.unlabeled.noBigrams.updown 
    
  done
done

bin/run-mdsd-lda.sh

#!/bin/bash
# Run n-fold LDA sentiment experiments on the four MDSD review domains
# (books, dvd, electronics, kitchen): Maxent over discriminant-LDA topics,
# and KNN for several neighbourhood sizes, each on both the bigram and
# no-bigram preprocessed data.  Assumes `updown` on PATH and $UPDOWN_DIR set.

iter=1000   # Gibbs sampling iterations
beta=0.01   # topic-word smoothing
nFold=3     # cross-validation folds
for k in 100; do \
  # Common LDA heuristic: alpha = 50 / numTopics.
  alpha=$( echo - | awk "{ print 50/$k }" )
  
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdBooksBi -g $UPDOWN_DIR/out/data/mdsd.books.unlabeled.bigrams.updown 
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdBooks -g $UPDOWN_DIR/out/data/mdsd.books.unlabeled.noBigrams.updown 
  
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdDvdBi -g $UPDOWN_DIR/out/data/mdsd.dvd.unlabeled.bigrams.updown 
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdDvd -g $UPDOWN_DIR/out/data/mdsd.dvd.unlabeled.noBigrams.updown 
  
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdElecBi -g $UPDOWN_DIR/out/data/mdsd.electronics.unlabeled.bigrams.updown 
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdElec -g $UPDOWN_DIR/out/data/mdsd.electronics.unlabeled.noBigrams.updown 
  
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdKitchenBi -g $UPDOWN_DIR/out/data/mdsd.kitchen.unlabeled.bigrams.updown 
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentMdsdKitchen -g $UPDOWN_DIR/out/data/mdsd.kitchen.unlabeled.noBigrams.updown 
  
  for KNN in 5 11 21; do \
    # Log the configuration and a timestamp before each (long) KNN sweep.
    echo iter=$iter alpha=$alpha beta=$beta k=$k KNN=$KNN name=Lda"$k"Knn"$KNN"MdsdBooks
    date
    
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdBooksBi -g $UPDOWN_DIR/out/data/mdsd.books.unlabeled.bigrams.updown 
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdBooks -g $UPDOWN_DIR/out/data/mdsd.books.unlabeled.noBigrams.updown 
    
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdDvdBi -g $UPDOWN_DIR/out/data/mdsd.dvd.unlabeled.bigrams.updown 
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdDvd -g $UPDOWN_DIR/out/data/mdsd.dvd.unlabeled.noBigrams.updown 
    
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdElecBi -g $UPDOWN_DIR/out/data/mdsd.electronics.unlabeled.bigrams.updown 
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdElec -g $UPDOWN_DIR/out/data/mdsd.electronics.unlabeled.noBigrams.updown 
    
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdKitchenBi -g $UPDOWN_DIR/out/data/mdsd.kitchen.unlabeled.bigrams.updown 
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"MdsdKitchen -g $UPDOWN_DIR/out/data/mdsd.kitchen.unlabeled.noBigrams.updown 
  done
done
#!/bin/bash
# NOTE(review): a second shebang mid-file -- it looks like two separate
# scripts were concatenated into run-mdsd-lda.sh.  Everything below is a
# standalone preproc/eval driver for the MDSD reviews; confirm whether it
# belongs in its own file.

path=/data/mdsd/processed_acl
out=out/data
# NOTE(review): stoplist is assigned but never used below -- verify intent.
stoplist=src/main/resources/eng/dictionary/stoplist.txt

CMD=$1    # subcommand: "preproc" or "eval"
shift

case $CMD in 
  preproc)
    # Preprocess each review domain with six tokenizer/vocab pipelines,
    # then split each combined file into train and test portions.
    for domain in $path/*; do
      domain=$(basename $domain)
      outfile=$out/mdsd.$domain.combined.updown
      echo generating $outfile
      for pipe in "twok" "twok_stop" "twok_1600V" "basic" "basic_stop" "basic_1600V"; do
        # Map the pipeline name to a --textPipeline chain and optional
        # vocabulary cap.
        case $pipe in
          twok) 
            pipeline="twokenize"
            vocab=""
            ;;
          twok_stop) 
            pipeline="twokenize|removeStopwords"
            vocab=""
            ;;
          twok_1600V)
            pipeline="twokenize"
            vocab="--vocabSize 1600"
            ;;
          basic) 
            pipeline="basicTokenize"
            vocab=""
            ;;
          basic_stop) 
            pipeline="basicTokenize|removeStopwords"
            vocab=""
            ;;
          basic_1600V)
            pipeline="basicTokenize"
            vocab="--vocabSize 1600"
            ;;
        esac
        # NOTE(review): $mode is never set in this script, so this STAT line
        # has an empty middle field and will not match parse_updoutput.py's
        # three-field "STAT: <fold> <prep> <exp>" regex -- verify which
        # fields were intended here.
        echo "STAT: $domain $mode $pipe"
        echo updown run updown.preproc.impl.PreprocMDSDReviews --textPipeline $pipeline $vocab -f $out/mdsd.$pipe.$domain.combined.updown $path/$domain/*
        updown run updown.preproc.impl.PreprocMDSDReviews --textPipeline $pipeline $vocab -f $out/mdsd.$pipe.$domain.combined.updown $path/$domain/*
        cat $out/mdsd.$pipe.$domain.combined.updown | ~/repos/updown/make_split.py $out/mdsd.$pipe.$domain.train.updown $out/mdsd.$pipe.$domain.test.updown
        wc -l $out/mdsd.$pipe.$domain.train.updown $out/mdsd.$pipe.$domain.test.updown

      done
    done
    ;;
  eval)
    # Train/test LDA+Maxent (several topic counts) and plain Maxent on each
    # domain/pipeline split, emitting STAT headers for parse_updoutput.py.
    for domain in $path/*; do
      domain=$(basename $domain)
      for pipe in "twok" "twok_stop" "twok_1600V" "basic" "basic_stop" "basic_1600V"; do
        TRAIN="-G $out/mdsd.$pipe.$domain.train.updown -G $out/../nodistribute/bb7_noNeut"
        TEST="-g $out/mdsd.$pipe.$domain.test.updown"
        for k in 25 50 75 100; do
          # Common LDA heuristic: alpha = 50 / numTopics.
          alpha=$( echo - | awk "{ print 50/$k }" )
          echo "STAT: $domain $pipe lda $k"
          updown 3 run updown.app.experiment.topic.lda.SplitLDAMaxentExperiment --numTopics $k --alpha $alpha --beta 0.01 --iterations 100  $TRAIN $TEST
        done
        echo "STAT: $domain $pipe maxent"
        updown run updown.app.experiment.maxent.SplitMaxentExperiment  $TRAIN $TEST
      done
    done
    ;;
esac

bin/run-shamma-lda.sh

#!/bin/bash
# Run n-fold LDA sentiment experiments on the Shamma dataset: Maxent over
# discriminant-LDA topics, and KNN for several neighbourhood sizes, each on
# both the bigram and no-bigram preprocessed data.  Assumes `updown` is on
# PATH and $UPDOWN_DIR is set.

iter=1000   # Gibbs sampling iterations
beta=0.01   # topic-word smoothing
nFold=3     # cross-validation folds
for k in 3 10 100; do \
  # Common LDA heuristic: alpha = 50 / numTopics.
  alpha=$( echo - | awk "{ print 50/$k }" )
  
  echo Maxent k=$k
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentShammaBi -g $UPDOWN_DIR/out/data/shamma.all.unlabeled.bigrams.updown 
  updown run updown.app.experiment.topic.lda.NFoldDiscriminantLDAExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --name Lda"$k"MaxentShamma -g $UPDOWN_DIR/out/data/shamma.all.unlabeled.noBigrams.updown 
  
  for KNN in 5 11 21; do \
    # Log the configuration and a timestamp before each (long) KNN run.
    echo iter=$iter alpha=$alpha beta=$beta k=$k KNN=$KNN name=Lda"$k"Knn"$KNN"Shamma
    date
    
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"ShammaBi -g $UPDOWN_DIR/out/data/shamma.all.unlabeled.bigrams.updown 
    updown run updown.app.experiment.topic.lda.NFoldKNNDiscriminantExperiment -n $nFold --format tex --alpha $alpha --beta $beta --iterations $iter --numTopics $k --numNearestNeighbors $KNN --name Lda"$k"Knn"$KNN"Shamma -g $UPDOWN_DIR/out/data/shamma.all.unlabeled.noBigrams.updown 
    
  done
done

src/main/scala/updown/util/MalletModel.scala

+package updown.util
+
+import updown.data.{SentimentLabel, GoldLabeledTweet}
+import cc.mallet.types._
+
/**
 * Base class for models built on the Mallet toolkit.  Provides shared
 * conversions from lists of GoldLabeledTweet into Mallet Alphabet /
 * InstanceList pairs, with three different label encodings.
 */
abstract class MalletModel {
  /**
   * Convert tweets into an InstanceList over a fresh Alphabet.
   * Each tweet's features become a FeatureSequence; the gold label is
   * encoded as a single-entry FeatureVector named "label" whose weight is
   * SentimentLabel.toDouble(goldLabel).
   * Returns the feature alphabet together with the populated instance list.
   */
  protected def getInstanceList(tweetList: List[GoldLabeledTweet]): (Alphabet, InstanceList) = {
    val alphabet = new Alphabet()
    val labelAlphabet = new Alphabet()
    val instances = (for (tweet <- tweetList) yield {
      tweet match {
        case GoldLabeledTweet(id, userid, features, goldLabel) =>
          val featureSequence = new FeatureSequence(alphabet, features.length)
          for (feature <- features) {
            featureSequence.add(feature)
          }
          val label = new FeatureVector(
            labelAlphabet,
            Array[Object]("label"), Array[Double](SentimentLabel.toDouble(goldLabel)))
          new Instance(featureSequence, label, id, null)
      }
    }).toList

    // Second constructor argument (target alphabet) is null -- targets here
    // are FeatureVectors rather than Labels; TODO confirm against Mallet docs.
    val instanceList = new InstanceList(alphabet, null)
    for (instance <- instances) {
      instanceList.add(instance)
    }
    (alphabet, instanceList)
  }

  /**
   * Overload that reuses an existing feature alphabet (e.g. from a trained
   * model).  The gold label object itself is passed as the instance target,
   * rather than a FeatureVector encoding.
   */
  protected def getInstanceList(tweetList: List[GoldLabeledTweet], alphabet: Alphabet) = {
    val instances = (for (tweet <- tweetList) yield {
      tweet match {
        case GoldLabeledTweet(id, userid, features, goldLabel) =>
          val featureSequence = new FeatureSequence(alphabet, features.length)
          for (feature <- features) {
            featureSequence.add(feature)
          }
          new Instance(featureSequence, goldLabel, id, null)
      }
    }).toList

    val instanceList = new InstanceList(alphabet, null)
    for (instance <- instances) {
      instanceList.add(instance)
    }
    instanceList
  }

  // Feature names for the one-hot label encoding below.  Despite the "get"
  // prefix, this is a plain val evaluated once, in the fixed order
  // Positive, Neutral, Negative.
  val getLabelNameArray = Array[Object](
    SentimentLabel.toEnglishName(SentimentLabel.Positive),
    SentimentLabel.toEnglishName(SentimentLabel.Neutral),
    SentimentLabel.toEnglishName(SentimentLabel.Negative)
  )
  // One-hot weights matching getLabelNameArray's ordering.
  val getLabelFeatureArray: SentimentLabel.Type => Array[Double] =
    (label: SentimentLabel.Type) => {
      label match {
        case SentimentLabel.Positive => Array[Double](1.0, 0.0, 0.0)
        case SentimentLabel.Neutral => Array[Double](0.0, 1.0, 0.0)
        case SentimentLabel.Negative => Array[Double](0.0, 0.0, 1.0)
      }
    }

  /**
   * Like the single-argument getInstanceList, but encodes the gold label as
   * a three-way one-hot FeatureVector (Positive/Neutral/Negative) instead of
   * a single scalar-weighted feature.
   */
  protected def getInstanceListWithLabelVectors(tweetList: List[GoldLabeledTweet]): (Alphabet, InstanceList) = {
    val alphabet = new Alphabet()
    val labelAlphabet = new Alphabet()
    val instances = (for (tweet <- tweetList) yield {
      tweet match {
        case GoldLabeledTweet(id, userid, features, goldLabel) =>
          val featureSequence = new FeatureSequence(alphabet, features.length)
          for (feature <- features) {
            featureSequence.add(feature)
          }
          val label = new FeatureVector(
            labelAlphabet,
            getLabelNameArray,
            getLabelFeatureArray(goldLabel)
          )
          new Instance(featureSequence, label, id, null)
      }
    }).toList

    val instanceList = new InstanceList(alphabet, null)
    for (instance <- instances) {
      instanceList.add(instance)
    }
    (alphabet, instanceList)
  }
}

src/main/tex/results.tex

+\documentclass[letterpaper]{article}
+\usepackage{amssymb,amsmath,amsthm, verbatim}
+
+%result variables
+\include{results-variables}
+
+\author{John Roesler}
+\title{Tables for Thesis}
+\begin{document}
+\maketitle
+\begin{table}[htbp]
+    \begin{center}
+        \begin{tabular}{ | l || c  c  c  c  c  c | }
+            \hline
+            Experiment & MDSD:Book   & MDSD:DVD & MDSD:Elec & MDSD:Kitchen & HCR & Shamma \\
+            N          &\nNbMdsdBooks  &\nNbMdsdDvd &\nNbMdsdElec &\nNbMdsdKitchen &\nNbHcr &\nNbShamma \\ \hline \hline
+            MPQA       &\accLexMdsdBooks  &\accLexMdsdDvd &\accLexMdsdElec &\accLexMdsdKitchen &\accLexHcr &\accLexShamma \\
+            MPQA bigrams&\accLexMdsdBooksBi  &\accLexMdsdDvdBi &\accLexMdsdElecBi &\accLexMdsdKitchenBi &\accLexHcrBi &\accLexShammaBi \\
+            NBayes       &\accNbMdsdBooks  &\accNbMdsdDvd &\accNbMdsdElec &\accNbMdsdKitchen &\accNbHcr &\accNbShamma \\
+            NBayes bigrams&\accNbMdsdBooksBi  &\accNbMdsdDvdBi &\accNbMdsdElecBi &\accNbMdsdKitchenBi &\accNbHcrBi &\accNbShammaBi \\
+            Maxent       &\accMeMdsdBooks  &\accMeMdsdDvd &\accMeMdsdElec &\accMeMdsdKitchen &\accMeHcr &\accMeShamma \\
+            Maxent bigrams&\accMeMdsdBooksBi  &\accMeMdsdDvdBi &\accMeMdsdElecBi &\accMeMdsdKitchenBi &\accMeHcrBi &\accMeShammaBi \\
+            \hline
+        \end{tabular}
+    \end{center}
+    \caption{Overall Accuracy}
+    \label{table:oacc}
+\end{table}
+
+\begin{table}[htbp]
+    \begin{center}
+        \begin{tabular}{ | l || c  c  c  c  c  c | }
+            \hline
+            Experiment & MDSD:Book   & MDSD:DVD & MDSD:Elec & MDSD:Kitchen & HCR & Shamma \\
+            N          &\nNbMdsdBooks  &\nNbMdsdDvd &\nNbMdsdElec &\nNbMdsdKitchen &\nNbHcr &\nNbShamma \\ \hline \hline
+            MPQA       &\fposLexMdsdBooks  &\fposLexMdsdDvd &\fposLexMdsdElec &\fposLexMdsdKitchen &\fposLexHcr &\fposLexShamma \\
+            MPQA bigrams&\fposLexMdsdBooksBi  &\fposLexMdsdDvdBi &\fposLexMdsdElecBi &\fposLexMdsdKitchenBi &\fposLexHcrBi &\fposLexShammaBi \\
+            NBayes       &\fposNbMdsdBooks  &\fposNbMdsdDvd &\fposNbMdsdElec &\fposNbMdsdKitchen &\fposNbHcr &\fposNbShamma \\
+            NBayes bigrams&\fposNbMdsdBooksBi  &\fposNbMdsdDvdBi &\fposNbMdsdElecBi &\fposNbMdsdKitchenBi &\fposNbHcrBi &\fposNbShammaBi \\
+            Maxent       &\fposMeMdsdBooks  &\fposMeMdsdDvd &\fposMeMdsdElec &\fposMeMdsdKitchen &\fposMeHcr &\fposMeShamma \\
+            Maxent bigrams&\fposMeMdsdBooksBi  &\fposMeMdsdDvdBi &\fposMeMdsdElecBi &\fposMeMdsdKitchenBi &\fposMeHcrBi &\fposMeShammaBi \\
+            \hline
+        \end{tabular}
+    \end{center}
+    \caption{F1-Score for POS}
+    \label{table:f1pos}
+\end{table}
+
+\begin{table}[htbp]
+    \begin{center}
+        \begin{tabular}{ | l || c  c  c  c  c  c | }
+            \hline
+            Experiment & MDSD:Book   & MDSD:DVD & MDSD:Elec & MDSD:Kitchen & HCR & Shamma \\
+            N          &\nNbMdsdBooks  &\nNbMdsdDvd &\nNbMdsdElec &\nNbMdsdKitchen &\nNbHcr &\nNbShamma \\ \hline \hline
+            MPQA       &\fnegLexMdsdBooks  &\fnegLexMdsdDvd &\fnegLexMdsdElec &\fnegLexMdsdKitchen &\fnegLexHcr &\fnegLexShamma \\
+            MPQA bigrams&\fnegLexMdsdBooksBi  &\fnegLexMdsdDvdBi &\fnegLexMdsdElecBi &\fnegLexMdsdKitchenBi &\fnegLexHcrBi &\fnegLexShammaBi \\
+            NBayes       &\fnegNbMdsdBooks  &\fnegNbMdsdDvd &\fnegNbMdsdElec &\fnegNbMdsdKitchen &\fnegNbHcr &\fnegNbShamma \\
+            NBayes bigrams&\fnegNbMdsdBooksBi  &\fnegNbMdsdDvdBi &\fnegNbMdsdElecBi &\fnegNbMdsdKitchenBi &\fnegNbHcrBi &\fnegNbShammaBi \\
+            Maxent       &\fnegMeMdsdBooks  &\fnegMeMdsdDvd &\fnegMeMdsdElec &\fnegMeMdsdKitchen &\fnegMeHcr &\fnegMeShamma \\
+            Maxent bigrams&\fnegMeMdsdBooksBi  &\fnegMeMdsdDvdBi &\fnegMeMdsdElecBi &\fnegMeMdsdKitchenBi &\fnegMeHcrBi &\fnegMeShammaBi \\
+            \hline
+        \end{tabular}
+    \end{center}
+    \caption{F1-Score for NEG}
+    \label{table:f1neg}
+\end{table}
+
+\begin{table}[htbp]
+    \begin{center}
+        \begin{tabular}{ | r || c  c  c | }
+            \hline
+            N & \multicolumn{3}{|l|}{1446}   \\
+            Accuracy & \multicolumn{3}{|l|}{0.90}   \\ \hline \hline
+            Label & Precision & Recall & F-Score \\ \hline
+            POS   & 1.00      & 0.96   & 0.98    \\
+            NEG   & 0.82      & 0.94   & 0.87    \\
+            NEU   & 0.89      & 0.78   & 0.83    \\
+            \hline
+        \end{tabular}
+    \end{center}
+    \caption{Baseline--Maxent}
+    \label{table:cer2kbaseline}
+\end{table}
+\end{document}
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.