Java class org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo — example source code

项目:hadoop    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:aliyun-oss-hadoop-fs    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            buffer.append(" <- ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:big-c    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:hadoop-2.6.0-cdh5.4.3    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:hadoop-plus    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:FlexMap    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:hops    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            buffer.append(" <- ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:hadoop-TCP    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:hardfs    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}
项目:hadoop-on-lustre2    文件:ConfBlock.java   
/**
 * Render the job configuration as an HTML table with key, value and
 * source-chain columns. Emits an apology paragraph instead when the job id
 * is missing, the job cannot be found, or the conf file cannot be read.
 */
@Override protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.
      p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.
      p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  Path confPath = job.getConfFile();
  try {
    ConfInfo info = new ConfInfo(job);

    html.div().h3(confPath.toString())._();
    TBODY<TABLE<Hamlet>> tbody = html.
      // Configuration properties table
    table("#conf").
      thead().
        tr().
          th(_TH, "key").
          th(_TH, "value").
          th(_TH, "source chain").
        _().
      _().
    tbody();
    for (ConfEntryInfo entry : info.getProperties()) {
      // StringBuilder: method-local accumulator, StringBuffer's
      // synchronization is unnecessary here.
      StringBuilder buffer = new StringBuilder();
      String[] sources = entry.getSource();
      //Skip the last entry, because it is always the same HDFS file, and
      // output them in reverse order so most recent is output first
      if (sources != null) {
        boolean first = true;
        for (int i = (sources.length - 2); i >= 0; i--) {
          if (!first) {
            // \u2B05 is an arrow <--
            buffer.append(" \u2B05 ");
          }
          first = false;
          buffer.append(sources[i]);
        }
      }
      tbody.
        tr().
          td(entry.getName()).
          td(entry.getValue()).
          td(buffer.toString()).
        _();
    }
    tbody._().
    tfoot().
      // Search boxes used by the client-side table filter.
      tr().
        th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
        th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
        th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
        _().
      _().
    _();
  } catch(IOException e) {
    // Keep the page up; log the failure and show an apology instead.
    LOG.error("Error while reading "+confPath, e);
    html.p()._("Sorry got an error while reading conf file. ",confPath);
  }
}