Example source code for the Java class org.apache.lucene.index.IndexFileNames

Project: elasticsearch_my    File: Lucene.java
/**
 * This method removes all lucene files from the given directory. It will first try to delete all commit points / segments
 * files to ensure broken commits or corrupted indices will not be opened in the future. If any of the segment files
 * can't be deleted, this operation fails.
 */
public static void cleanLuceneIndex(Directory directory) throws IOException {
    try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
        for (final String file : directory.listAll()) {
            if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
                directory.deleteFile(file); // remove all segment_N files
            }
        }
    }
    try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
            .setMergePolicy(NoMergePolicy.INSTANCE) // no merges
            .setCommitOnClose(false) // no commits
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
    {
        // do nothing and close; this will kick off IndexFileDeleter, which will remove all pending files
    }
}
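A minimal usage sketch of the helper above (the directory path and the FSDirectory/Paths plumbing are made up for illustration; only the cleanLuceneIndex call comes from the snippet):

// Hypothetical call site: wipe a corrupted index before re-creating it.
try (Directory dir = FSDirectory.open(Paths.get("/var/data/index/shard0"))) {
    Lucene.cleanLuceneIndex(dir); // deletes segments_N files, then commits a fresh empty index
}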
Project: lams    File: Lucene45DocValuesConsumer.java
/** expert: Creates a new writer */
public Lucene45DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, Lucene45DocValuesFormat.VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, Lucene45DocValuesFormat.VERSION_CURRENT);
    maxDoc = state.segmentInfo.getDocCount();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
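The try/finally around a success flag is a recurring Lucene idiom in the consumers on this page: if any step of the constructor throws, the partially opened outputs are closed without masking the original exception. A stripped-down sketch of just the idiom (the directory variable and file names are hypothetical):

IndexOutput data = null;
IndexOutput meta = null;
boolean success = false;
try {
    data = dir.createOutput("_0.dat", IOContext.DEFAULT); // may throw
    meta = dir.createOutput("_0.mta", IOContext.DEFAULT); // may throw
    success = true;
} finally {
    if (!success) {
        // closes quietly so the exception from the try block propagates unmasked
        IOUtils.closeWhileHandlingException(data, meta);
    }
}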
Project: lams    File: Lucene49NormsConsumer.java
Lucene49NormsConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: lams    File: Lucene49DocValuesConsumer.java
/** expert: Creates a new writer */
public Lucene49DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, Lucene49DocValuesFormat.VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, Lucene49DocValuesFormat.VERSION_CURRENT);
    maxDoc = state.segmentInfo.getDocCount();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: lams    File: Lucene3xSegmentInfoReader.java
@Override
public SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException { 
  // NOTE: this is NOT how 3.x is really written...
  String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene3xSegmentInfoFormat.UPGRADED_SI_EXTENSION);

  boolean success = false;

  IndexInput input = directory.openInput(fileName, context);

  try {
    SegmentInfo si = readUpgradedSegmentInfo(segmentName, directory, input);
    success = true;
    return si;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(input);
    } else {
      input.close();
    }
  }
}
Project: lams    File: Lucene3xCodec.java
/** Returns file names for shared doc stores, if any, else
 * null. */
public static Set<String> getDocStoreFiles(SegmentInfo info) {
  if (Lucene3xSegmentInfoFormat.getDocStoreOffset(info) != -1) {
    final String dsName = Lucene3xSegmentInfoFormat.getDocStoreSegment(info);
    Set<String> files = new HashSet<>();
    if (Lucene3xSegmentInfoFormat.getDocStoreIsCompoundFile(info)) {
      files.add(IndexFileNames.segmentFileName(dsName, "", COMPOUND_FILE_STORE_EXTENSION));
    } else {
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
    }
    return files;
  } else {
    return null;
  }
}
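IndexFileNames.segmentFileName simply concatenates the segment name, an optional suffix, and the extension. Expected outputs, as a sketch (shown in comments):

IndexFileNames.segmentFileName("_0", "", "fdt");           // "_0.fdt"
IndexFileNames.segmentFileName("_0", "Lucene41_0", "doc"); // "_0_Lucene41_0.doc"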
Project: lams    File: Lucene410DocValuesConsumer.java
/** expert: Creates a new writer */
public Lucene410DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, Lucene410DocValuesFormat.VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, Lucene410DocValuesFormat.VERSION_CURRENT);
    maxDoc = state.segmentInfo.getDocCount();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: lams    File: CompoundFileDirectory.java
/** Returns an array of strings, one for each file in the directory. */
@Override
public String[] listAll() {
  ensureOpen();
  String[] res;
  if (writer != null) {
    res = writer.listAll(); 
  } else {
    res = entries.keySet().toArray(new String[entries.size()]);
    // Add the segment name
    String seg = IndexFileNames.parseSegmentName(fileName);
    for (int i = 0; i < res.length; i++) {
      res[i] = seg + res[i];
    }
  }
  return res;
}
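parseSegmentName does the inverse job: it returns the leading segment name of a file name, so prepending it to the stored entry suffixes reconstructs the original names. Expected behavior, as a sketch:

IndexFileNames.parseSegmentName("_0.cfs");   // "_0"
IndexFileNames.parseSegmentName("_5_1.liv"); // "_5"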
Project: Elasticsearch    File: Completion090PostingsFormat.java
public CompletionFieldsConsumer(SegmentWriteState state) throws IOException {
    this.delegatesFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state);
    String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
    IndexOutput output = null;
    boolean success = false;
    try {
        output = state.directory.createOutput(suggestFSTFile, state.context);
        CodecUtil.writeHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT);
        /*
         * we write the delegate postings format name so we can load it
         * without getting an instance in the ctor
         */
        output.writeString(delegatePostingsFormat.getName());
        output.writeString(writeProvider.getName());
        this.suggestFieldsConsumer = writeProvider.consumer(output);
        success = true;
    } finally {
        if (!success) {
            IOUtils.closeWhileHandlingException(output);
        }
    }
}
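The read side must mirror that write order: header first, then the two strings. A hypothetical counterpart, assuming a SegmentReadState with the same directory/context fields; SUGGEST_VERSION_START is an assumed constant, and the real reader lives elsewhere in Completion090PostingsFormat:

// Hypothetical read-side sketch mirroring the write order above.
IndexInput in = state.directory.openInput(suggestFSTFile, state.context);
CodecUtil.checkHeader(in, CODEC_NAME, SUGGEST_VERSION_START, SUGGEST_VERSION_CURRENT);
String delegateName = in.readString(); // written by output.writeString(delegatePostingsFormat.getName())
String providerName = in.readString(); // written by output.writeString(writeProvider.getName())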
Project: search    File: IndexReplicationHandler.java
/**
 * Verifies that the last file is segments_N and fails otherwise. It also
 * removes and returns the file from the list, because it needs to be handled
 * last, after all files. This is important in order to guarantee that if a
 * reader sees the new segments_N, all other segment files are already on
 * stable storage.
 * <p>
 * The reason the code fails instead of moving the segments_N file to the end is
 * that such an ordering problem indicates an error in the Revision implementation.
 */
public static String getSegmentsFile(List<String> files, boolean allowEmpty) {
  if (files.isEmpty()) {
    if (allowEmpty) {
      return null;
    } else {
      throw new IllegalStateException("empty list of files not allowed");
    }
  }

  String segmentsFile = files.remove(files.size() - 1);
  if (!segmentsFile.startsWith(IndexFileNames.SEGMENTS) || segmentsFile.equals(IndexFileNames.SEGMENTS_GEN)) {
    throw new IllegalStateException("last file to copy+sync must be segments_N but got " + segmentsFile
        + "; check your Revision implementation!");
  }
  return segmentsFile;
}
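A usage sketch (the file list is made up): the method both validates and removes the segments_N entry, leaving the per-segment files to be copied first.

List<String> files = new ArrayList<>(Arrays.asList("_0.cfs", "_0.cfe", "_0.si", "segments_3"));
String segmentsFile = IndexReplicationHandler.getSegmentsFile(files, false); // "segments_3"
// files now holds only ["_0.cfs", "_0.cfe", "_0.si"], to be synced before segments_3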
Project: search    File: IndexAndTaxonomyRevisionTest.java
@Test
public void testSegmentsFileLast() throws Exception {
  Directory indexDir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter indexWriter = new IndexWriter(indexDir, conf);

  Directory taxoDir = newDirectory();
  SnapshotDirectoryTaxonomyWriter taxoWriter = new SnapshotDirectoryTaxonomyWriter(taxoDir);
  try {
    indexWriter.addDocument(newDocument(taxoWriter));
    indexWriter.commit();
    taxoWriter.commit();
    Revision rev = new IndexAndTaxonomyRevision(indexWriter, taxoWriter);
    Map<String,List<RevisionFile>> sourceFiles = rev.getSourceFiles();
    assertEquals(2, sourceFiles.size());
    for (List<RevisionFile> files : sourceFiles.values()) {
      String lastFile = files.get(files.size() - 1).fileName;
      assertTrue(lastFile.startsWith(IndexFileNames.SEGMENTS) && !lastFile.equals(IndexFileNames.SEGMENTS_GEN));
    }
  } finally {
    IOUtils.close(indexWriter, taxoWriter, taxoDir, indexDir);
  }
}
Project: search    File: LocalReplicatorTest.java
@Test
public void testRevisionRelease() throws Exception {
  // we look to see that certain files are deleted:
  if (sourceDir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(false);
  }

  try {
    replicator.publish(createRevision(1));
    assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
    replicator.publish(createRevision(2));
    // now the files of revision 1 can be deleted
    assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_2"));
    assertFalse("segments_1 should not be found in index directory after revision is released", slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
  } finally {
    if (sourceDir instanceof MockDirectoryWrapper) {
      // set back to on for other tests
      ((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(true);
    }
  }
}
Project: search    File: IndexRevisionTest.java
@Test
public void testSegmentsFileLast() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter writer = new IndexWriter(dir, conf);
  try {
    writer.addDocument(new Document());
    writer.commit();
    Revision rev = new IndexRevision(writer);
    @SuppressWarnings("unchecked")
    Map<String, List<RevisionFile>> sourceFiles = rev.getSourceFiles();
    assertEquals(1, sourceFiles.size());
    List<RevisionFile> files = sourceFiles.values().iterator().next();
    String lastFile = files.get(files.size() - 1).fileName;
    assertTrue(lastFile.startsWith(IndexFileNames.SEGMENTS) && !lastFile.equals(IndexFileNames.SEGMENTS_GEN));
  } finally {
    IOUtils.close(writer, dir);
  }
}
Project: search    File: FSTTermsWriter.java
public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) throws IOException {
  final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION);

  this.postingsWriter = postingsWriter;
  this.fieldInfos = state.fieldInfos;
  this.out = state.directory.createOutput(termsFileName, state.context);

  boolean success = false;
  try {
    writeHeader(out);
    this.postingsWriter.init(out); 
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
Project: search    File: DirectDocValuesConsumer.java
DirectDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: search    File: MemoryDocValuesConsumer.java
MemoryDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: search    File: FSTOrdTermsWriter.java
public FSTOrdTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) throws IOException {
  final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION);
  final String termsBlockFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_BLOCK_EXTENSION);

  this.postingsWriter = postingsWriter;
  this.fieldInfos = state.fieldInfos;

  boolean success = false;
  try {
    this.indexOut = state.directory.createOutput(termsIndexFileName, state.context);
    this.blockOut = state.directory.createOutput(termsBlockFileName, state.context);
    writeHeader(indexOut);
    writeHeader(blockOut);
    this.postingsWriter.init(blockOut); 
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(indexOut, blockOut);
    }
  }
}
Project: search    File: FixedGapTermsIndexWriter.java
public FixedGapTermsIndexWriter(SegmentWriteState state) throws IOException {
  final String indexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION);
  termIndexInterval = state.termIndexInterval;
  out = state.directory.createOutput(indexFileName, state.context);
  boolean success = false;
  try {
    fieldInfos = state.fieldInfos;
    writeHeader(out);
    out.writeInt(termIndexInterval);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
Project: search    File: BlockTermsWriter.java
public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter,
    SegmentWriteState state, PostingsWriterBase postingsWriter)
    throws IOException {
  final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION);
  this.termsIndexWriter = termsIndexWriter;
  out = state.directory.createOutput(termsFileName, state.context);
  boolean success = false;
  try {
    fieldInfos = state.fieldInfos;
    writeHeader(out);
    currentField = null;
    this.postingsWriter = postingsWriter;
    // segment = state.segmentName;

    //System.out.println("BTW.init seg=" + state.segmentName);

    postingsWriter.init(out); // have consumer write its format/header
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
Project: search    File: PulsingPostingsWriter.java
@Override
public void close() throws IOException {
  wrappedPostingsWriter.close();
  if (wrappedPostingsWriter instanceof PulsingPostingsWriter ||
      VERSION_CURRENT < VERSION_META_ARRAY) {
    return;
  }
  String summaryFileName = IndexFileNames.segmentFileName(segmentState.segmentInfo.name, segmentState.segmentSuffix, SUMMARY_EXTENSION);
  IndexOutput out = null;
  try {
    out = segmentState.directory.createOutput(summaryFileName, segmentState.context);
    CodecUtil.writeHeader(out, CODEC, VERSION_CURRENT);
    out.writeVInt(fields.size());
    for (FieldMetaData field : fields) {
      out.writeVInt(field.fieldNumber);
      out.writeVInt(field.longsSize);
    }
    out.close();
  } finally {
    IOUtils.closeWhileHandlingException(out);
  }
}
Project: search    File: RAMOnlyPostingsFormat.java
@Override
public FieldsProducer fieldsProducer(SegmentReadState readState)
  throws IOException {

  // Load our ID:
  final String idFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, ID_EXTENSION);
  IndexInput in = readState.directory.openInput(idFileName, readState.context);
  boolean success = false;
  final int id;
  try {
    CodecUtil.checkHeader(in, RAM_ONLY_NAME, VERSION_START, VERSION_LATEST);
    id = in.readVInt();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(in);
    } else {
      IOUtils.close(in);
    }
  }

  synchronized(state) {
    return state.get(id);
  }
}
Project: search    File: TermInfosWriter.java
TermInfosWriter(Directory directory, String segment, FieldInfos fis,
                int interval)
     throws IOException {
  initialize(directory, segment, fis, interval, false);
  boolean success = false;
  try {
    other = new TermInfosWriter(directory, segment, fis, interval, true);
    other.other = this;
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(output);

      try {
        directory.deleteFile(IndexFileNames.segmentFileName(segment, "",
            (isIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION
                : Lucene3xPostingsFormat.TERMS_EXTENSION)));
      } catch (IOException ignored) {
      }
    }
  }
}
Project: search    File: PreFlexRWTermVectorsWriter.java
public PreFlexRWTermVectorsWriter(Directory directory, String segment, IOContext context) throws IOException {
  this.directory = directory;
  this.segment = segment;
  boolean success = false;
  try {
    // Open files for TermVector storage
    tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context);
    tvx.writeInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
    tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
    tvd.writeInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
    tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
    tvf.writeInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
    success = true;
  } finally {
    if (!success) {
      abort();
    }
  }
}
Project: search    File: Lucene42NormsConsumer.java
Lucene42NormsConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: search    File: Lucene42DocValuesConsumer.java
Lucene42DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    // this writer writes the format 4.2 did!
    CodecUtil.writeHeader(data, dataCodec, VERSION_GCD_COMPRESSION);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_GCD_COMPRESSION);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Project: elasticsearch_my    File: Store.java
/**
 * This method deletes every file in this store that is neither contained in the given source metadata nor a
 * legacy checksum file. After the deletes it pulls the latest metadata snapshot from the store and compares it
 * to the given snapshot. If the snapshots are inconsistent, an IllegalStateException is thrown.
 *
 * @param reason         the reason for this cleanup operation, logged for each deleted file
 * @param sourceMetaData the metadata used for cleanup; all files in this metadata should be kept around
 * @throws IOException           if an IOException occurs
 * @throws IllegalStateException if the latest snapshot in this store differs from the given one after the cleanup
 */
public void cleanupAndVerify(String reason, MetadataSnapshot sourceMetaData) throws IOException {
    metadataLock.writeLock().lock();
    try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
        final StoreDirectory dir = directory;
        for (String existingFile : dir.listAll()) {
            if (Store.isAutogenerated(existingFile) || sourceMetaData.contains(existingFile)) {
                continue; // don't delete snapshot file, or the checksums file (note, this is extra protection since the Store won't delete checksum)
            }
            try {
                dir.deleteFile(reason, existingFile);
                // FNF should not happen since we hold a write lock?
            } catch (IOException ex) {
                if (existingFile.startsWith(IndexFileNames.SEGMENTS)
                        || existingFile.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
                    // TODO do we need to also fail this if we can't delete the pending commit file?
                    // if one of those files can't be deleted we better fail the cleanup otherwise we might leave an old commit point around?
                    throw new IllegalStateException("Can't delete " + existingFile + " - cleanup failed", ex);
                }
                logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to delete file [{}]", existingFile), ex);
                // ignore, we don't really care, will get deleted later on
            }
        }
        final Store.MetadataSnapshot metadataOrEmpty = getMetadata(null);
        verifyAfterCleanup(sourceMetaData, metadataOrEmpty);
    } finally {
        metadataLock.writeLock().unlock();
    }
}
Project: elasticsearch_my    File: Store.java
private int numSegmentFiles() { // only for asserts
    int count = 0;
    for (StoreFileMetaData file : this) {
        if (file.name().startsWith(IndexFileNames.SEGMENTS)) {
            count++;
        }
    }
    return count;
}
Project: elasticsearch_my    File: Lucene.java
/**
 * Reads the SegmentInfos from the given commit, failing if they cannot be loaded
 */
public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOException {
    // Using commit.getSegmentsFileName() does NOT work here, have to
    // manually create the segment filename
    String filename = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", commit.getGeneration());
    return SegmentInfos.readCommit(commit.getDirectory(), filename);
}
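fileNameFromGeneration appends the generation in base 36 (Character.MAX_RADIX). Expected outputs, as a sketch:

IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 5);  // "segments_5"
IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 11); // "segments_b"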
Project: elasticsearch_my    File: StoreTests.java
public static void assertConsistent(Store store, Store.MetadataSnapshot metadata) throws IOException {
    for (String file : store.directory().listAll()) {
        if (!IndexWriter.WRITE_LOCK_NAME.equals(file) && !IndexFileNames.OLD_SEGMENTS_GEN.equals(file) && file.startsWith("extra") == false) {
            assertTrue(file + " is not in the map: " + metadata.asMap().size() + " vs. " + store.directory().listAll().length, metadata.asMap().containsKey(file));
        } else {
            assertFalse(file + " is not in the map: " + metadata.asMap().size() + " vs. " + store.directory().listAll().length, metadata.asMap().containsKey(file));
        }
    }
}
Project: elasticsearch_my    File: CorruptedFileIT.java
/**
 * Prunes the list of index files so that only the latest delete-generation (.liv) files remain.
 */
private void pruneOldDeleteGenerations(Set<Path> files) {
    final TreeSet<Path> delFiles = new TreeSet<>();
    for (Path file : files) {
        if (file.getFileName().toString().endsWith(".liv")) {
            delFiles.add(file);
        }
    }
    Path last = null;
    for (Path current : delFiles) {
        if (last != null) {
            final String newSegmentName = IndexFileNames.parseSegmentName(current.getFileName().toString());
            final String oldSegmentName = IndexFileNames.parseSegmentName(last.getFileName().toString());
            if (newSegmentName.equals(oldSegmentName)) {
                int oldGen = Integer.parseInt(IndexFileNames.stripExtension(IndexFileNames.stripSegmentName(last.getFileName().toString())).replace("_", ""), Character.MAX_RADIX);
                int newGen = Integer.parseInt(IndexFileNames.stripExtension(IndexFileNames.stripSegmentName(current.getFileName().toString())).replace("_", ""), Character.MAX_RADIX);
                if (newGen > oldGen) {
                    files.remove(last);
                } else {
                    files.remove(current);
                    continue;
                }
            }
        }
        last = current;
    }
}
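To see why the stripping and parseInt dance above works, here is how a delete-generation file name such as "_0_2.liv" decomposes (expected values as comments; the generation digits are base 36, hence Character.MAX_RADIX):

String name = "_0_2.liv";
IndexFileNames.parseSegmentName(name);                                // "_0"
IndexFileNames.stripSegmentName(name);                                // "_2.liv"
IndexFileNames.stripExtension(IndexFileNames.stripSegmentName(name)); // "_2"
Integer.parseInt("_2".replace("_", ""), Character.MAX_RADIX);         // 2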
Project: lams    File: Lucene40SegmentInfoWriter.java
/** Save a single segment's info. */
@Override
public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
  si.addFile(fileName);

  final IndexOutput output = dir.createOutput(fileName, ioContext);

  boolean success = false;
  try {
    CodecUtil.writeHeader(output, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_CURRENT);
    // Write the Lucene version that created this segment, since 3.1
    output.writeString(si.getVersion().toString());
    output.writeInt(si.getDocCount());

    output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
    output.writeStringStringMap(si.getDiagnostics());
    output.writeStringStringMap(Collections.<String,String>emptyMap());
    output.writeStringSet(si.files());

    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(output);
      // TODO: why must we do this? do we not get tracking dir wrapper?
      IOUtils.deleteFilesIgnoringExceptions(si.dir, fileName);
    } else {
      output.close();
    }
  }
}
Project: lams    File: Lucene40NormsFormat.java
@Override
public DocValuesProducer normsProducer(SegmentReadState state) throws IOException {
  String filename = IndexFileNames.segmentFileName(state.segmentInfo.name, 
                                                   "nrm", 
                                                   IndexFileNames.COMPOUND_FILE_EXTENSION);
  return new Lucene40DocValuesReader(state, filename, Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY);
}
Project: lams    File: Lucene40StoredFieldsWriter.java
public void abort() {
  try {
    close();
  } catch (Throwable ignored) {}
  IOUtils.deleteFilesIgnoringExceptions(directory,
      IndexFileNames.segmentFileName(segment, "", FIELDS_EXTENSION),
      IndexFileNames.segmentFileName(segment, "", FIELDS_INDEX_EXTENSION));
}
Project: lams    File: Lucene40PostingsReader.java
/** Sole constructor. */
public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, String segmentSuffix) throws IOException {
  boolean success = false;
  IndexInput freqIn = null;
  IndexInput proxIn = null;
  try {
    freqIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION),
                         ioContext);
    CodecUtil.checkHeader(freqIn, FRQ_CODEC, VERSION_START, VERSION_CURRENT);
    // TODO: hasProx should (somehow!) become codec private,
    // but it's tricky because 1) FIS.hasProx is global (it
    // could be all fields that have prox are written by a
    // different codec), 2) the field may have had prox in
    // the past but all docs w/ that field were deleted.
    // Really we'd need to init prxOut lazily on write, and
    // then somewhere record that we actually wrote it so we
    // know whether to open on read:
    if (fieldInfos.hasProx()) {
      proxIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION),
                           ioContext);
      CodecUtil.checkHeader(proxIn, PRX_CODEC, VERSION_START, VERSION_CURRENT);
    } else {
      proxIn = null;
    }
    this.freqIn = freqIn;
    this.proxIn = proxIn;
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(freqIn, proxIn);
    }
  }
}