/**
 * Creates a compound file from a single source file and verifies that the
 * stream read back from the compound file matches the original byte-for-byte,
 * including seek behavior. Source files of sizes 0, 1, 10 and 100 bytes are
 * exercised to cover the empty and small-file edge cases.
 */
public void testSingleFile() throws IOException {
    int[] sizes = new int[] { 0, 1, 10, 100 };
    for (int size : sizes) {
        String name = "t" + size;
        createSequenceFile(dir, name, (byte) 0, size);

        // Write the single file into a fresh compound file.
        CompoundFileDirectory writer =
            new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random()), true);
        dir.copy(writer, name, name, newIOContext(random()));
        writer.close();

        // Read it back and compare against the original.
        CompoundFileDirectory reader =
            new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random()), false);
        IndexInput expected = dir.openInput(name, newIOContext(random()));
        IndexInput actual = reader.openInput(name, newIOContext(random()));
        assertSameStreams(name, expected, actual);
        assertSameSeekBehavior(name, expected, actual);
        expected.close();
        actual.close();
        reader.close();
    }
}
/**
 * Creates a compound file holding two source files of different sizes and
 * verifies that both entries read back identically to the originals.
 */
public void testTwoFiles() throws IOException {
    createSequenceFile(dir, "d1", (byte) 0, 15);
    createSequenceFile(dir, "d2", (byte) 0, 114);

    CompoundFileDirectory writer =
        new CompoundFileDirectory(dir, "d.cfs", newIOContext(random()), true);
    dir.copy(writer, "d1", "d1", newIOContext(random()));
    dir.copy(writer, "d2", "d2", newIOContext(random()));
    writer.close();

    CompoundFileDirectory reader =
        new CompoundFileDirectory(dir, "d.cfs", newIOContext(random()), false);
    // Check each entry against its uncompounded counterpart.
    for (String name : new String[] { "d1", "d2" }) {
        IndexInput expected = dir.openInput(name, newIOContext(random()));
        IndexInput actual = reader.openInput(name, newIOContext(random()));
        assertSameStreams(name, expected, actual);
        assertSameSeekBehavior(name, expected, actual);
        expected.close();
        actual.close();
    }
    reader.close();
}
public void testFileNotFound() throws IOException { setUp_2(); CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false); // Open two files try { cr.openInput("bogus", newIOContext(random())); fail("File not found"); } catch (IOException e) { /* success */ //System.out.println("SUCCESS: File Not Found: " + e); } cr.close(); }
/**
 * Copies a file from one directory into a compound file living in a
 * different directory, then verifies the round-trip against the original.
 */
public void testAddExternalFile() throws IOException {
    createSequenceFile(dir, "d1", (byte) 0, 15);

    Directory otherDir = newDirectory();
    CompoundFileDirectory writer =
        new CompoundFileDirectory(otherDir, "d.cfs", newIOContext(random()), true);
    // Source lives in 'dir', destination compound file lives in 'otherDir'.
    dir.copy(writer, "d1", "d1", newIOContext(random()));
    writer.close();

    CompoundFileDirectory reader =
        new CompoundFileDirectory(otherDir, "d.cfs", newIOContext(random()), false);
    IndexInput expected = dir.openInput("d1", newIOContext(random()));
    IndexInput actual = reader.openInput("d1", newIOContext(random()));
    assertSameStreams("d1", expected, actual);
    assertSameSeekBehavior("d1", expected, actual);
    expected.close();
    actual.close();

    reader.close();
    otherDir.close();
}
/**
 * Writes a single entry into a compound file and verifies that both the
 * writer and a subsequent reader list exactly that one entry.
 */
public void testAppendTwice() throws IOException {
    Directory workDir = newDirectory();
    CompoundFileDirectory writer =
        new CompoundFileDirectory(workDir, "d.cfs", newIOContext(random()), true);
    createSequenceFile(workDir, "d1", (byte) 0, 15);

    IndexOutput out = writer.createOutput("d.xyz", newIOContext(random()));
    out.writeInt(0);
    out.close();

    // The writer view must show exactly the one entry just written.
    assertEquals(1, writer.listAll().length);
    assertEquals("d.xyz", writer.listAll()[0]);
    writer.close();

    // Reopen read-only and confirm the same listing.
    CompoundFileDirectory reader =
        new CompoundFileDirectory(workDir, "d.cfs", newIOContext(random()), false);
    assertEquals(1, reader.listAll().length);
    assertEquals("d.xyz", reader.listAll()[0]);
    reader.close();

    workDir.close();
}
private void checkFiles(Directory dir) throws IOException { for (String file : dir.listAll()) { if (file.endsWith(IndexFileNames.COMPOUND_FILE_EXTENSION)) { CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, newIOContext(random()), false); checkFiles(cfsDir); // recurse into cfs cfsDir.close(); } IndexInput in = null; boolean success = false; try { in = dir.openInput(file, newIOContext(random())); success = true; } finally { if (success) { IOUtils.close(in); } else { IOUtils.closeWhileHandlingException(in); } } } }
private void checkHeaders(Directory dir) throws IOException { for (String file : dir.listAll()) { if (file.equals(IndexWriter.WRITE_LOCK_NAME)) { continue; // write.lock has no footer, thats ok } if (file.endsWith(IndexFileNames.COMPOUND_FILE_EXTENSION)) { CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, newIOContext(random()), false); checkHeaders(cfsDir); // recurse into cfs cfsDir.close(); } IndexInput in = null; boolean success = false; try { in = dir.openInput(file, newIOContext(random())); CodecUtil.checksumEntireFile(in); success = true; } finally { if (success) { IOUtils.close(in); } else { IOUtils.closeWhileHandlingException(in); } } } }
public void testAppendTwice() throws IOException { Directory newDir = newDirectory(); CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true); createSequenceFile(newDir, "d1", (byte) 0, 15); IndexOutput out = csw.createOutput("d.xyz", newIOContext(random())); out.writeInt(0); try { newDir.copy(csw, "d1", "d1", newIOContext(random())); fail("file does already exist"); } catch (IllegalArgumentException e) { // } out.close(); assertEquals(1, csw.listAll().length); assertEquals("d.xyz", csw.listAll()[0]); csw.close(); CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false); assertEquals(1, cfr.listAll().length); assertEquals("d.xyz", cfr.listAll()[0]); cfr.close(); newDir.close(); }
private void checkHeaders(Directory dir) throws IOException { for (String file : dir.listAll()) { if (file.equals(IndexFileNames.SEGMENTS_GEN)) { continue; // segments.gen has no header, thats ok } if (file.endsWith(IndexFileNames.COMPOUND_FILE_EXTENSION)) { CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, newIOContext(random()), false); checkHeaders(cfsDir); // recurse into cfs cfsDir.close(); } IndexInput in = null; boolean success = false; try { in = dir.openInput(file, newIOContext(random())); int val = in.readInt(); assertEquals(file + " has no codec header, instead found: " + val, CodecUtil.CODEC_MAGIC, val); success = true; } finally { if (success) { IOUtils.close(in); } else { IOUtils.closeWhileHandlingException(in); } } } }
/**
 * Opens a term-vectors reader for a 3.x segment, or returns {@code null}
 * when the segment has no term-vectors file on disk.
 *
 * 3.x FieldInfos can falsely claim a segment has vectors, so this method
 * explicitly checks for the vectors-fields file — either inside the shared
 * doc-store compound file (.cfx) or directly in the directory — before
 * constructing the reader.
 */
@Override
public TermVectorsReader vectorsReader(Directory directory, SegmentInfo segmentInfo,
        FieldInfos fieldInfos, IOContext context) throws IOException {
    // Name of the vectors-fields file for this segment's doc store.
    final String fileName = IndexFileNames.segmentFileName(
        Lucene3xSegmentInfoFormat.getDocStoreSegment(segmentInfo), "",
        Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION);
    // Unfortunately, for 3.x indices, each segment's
    // FieldInfos can lie about hasVectors (claim it's true
    // when really it's false).... so we have to carefully
    // check if the files really exist before trying to open
    // them (4.x has fixed this):
    final boolean exists;
    if (Lucene3xSegmentInfoFormat.getDocStoreOffset(segmentInfo) != -1
            && Lucene3xSegmentInfoFormat.getDocStoreIsCompoundFile(segmentInfo)) {
        // Shared doc store packed in a compound (.cfx) file: look inside it.
        String cfxFileName = IndexFileNames.segmentFileName(
            Lucene3xSegmentInfoFormat.getDocStoreSegment(segmentInfo), "",
            Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION);
        if (segmentInfo.dir.fileExists(cfxFileName)) {
            Directory cfsDir =
                new CompoundFileDirectory(segmentInfo.dir, cfxFileName, context, false);
            try {
                exists = cfsDir.fileExists(fileName);
            } finally {
                // Always release the compound dir, even if fileExists throws.
                cfsDir.close();
            }
        } else {
            exists = false;
        }
    } else {
        // Doc store files live directly in the directory.
        exists = directory.fileExists(fileName);
    }
    if (!exists) {
        // 3x's FieldInfos sometimes lies and claims a segment
        // has vectors when it doesn't:
        return null;
    } else {
        return new Lucene3xTermVectorsReader(directory, segmentInfo, fieldInfos, context);
    }
}
/** Setup a larger compound file with a number of components, each of
 * which is a sequential file (so that we can easily tell that we are
 * reading in the right byte). The method sets up 20 files - f0 to f19,
 * the size of each file is 2000 bytes. */
// NOTE(review): the Javadoc previously claimed 1000 bytes per file, but
// createSequenceFile below is called with 2000 — comment corrected to match.
private void setUp_2() throws IOException {
    CompoundFileDirectory cw = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), true);
    for (int i=0; i<20; i++) {
        createSequenceFile(dir, "f" + i, (byte) 0, 2000);
        String fileName = "f" + i;
        // Copy the freshly created sequence file into the compound file.
        dir.copy(cw, fileName, fileName, newIOContext(random()));
    }
    cw.close();
}
/**
 * Verifies that a compound file written with no entries can be reopened
 * and reports an empty listing.
 */
public void testEmptyCFS() throws IOException {
    Directory workDir = newDirectory();

    // Create and immediately close an empty compound file.
    CompoundFileDirectory writer =
        new CompoundFileDirectory(workDir, "d.cfs", newIOContext(random()), true);
    writer.close();

    CompoundFileDirectory reader =
        new CompoundFileDirectory(workDir, "d.cfs", newIOContext(random()), false);
    assertEquals(0, reader.listAll().length);
    reader.close();

    workDir.close();
}
/**
 * Stress-tests a compound file containing many (at least 500) one-byte
 * sub-files: all entries are opened simultaneously and each must read back
 * its own distinct byte.
 */
public void testManySubFiles() throws IOException {
    final Directory fsDir = newFSDirectory(createTempDir("CFSManySubFiles"));
    final int FILE_COUNT = atLeast(500);

    // Each source file holds a single byte identifying its index.
    for (int i = 0; i < FILE_COUNT; i++) {
        IndexOutput out = fsDir.createOutput("file." + i, newIOContext(random()));
        out.writeByte((byte) i);
        out.close();
    }

    final CompoundFileDirectory writer =
        new CompoundFileDirectory(fsDir, "c.cfs", newIOContext(random()), true);
    for (int i = 0; i < FILE_COUNT; i++) {
        final String fileName = "file." + i;
        fsDir.copy(writer, fileName, fileName, newIOContext(random()));
    }
    writer.close();

    // Open every entry at once, then verify each byte.
    final IndexInput[] inputs = new IndexInput[FILE_COUNT];
    final CompoundFileDirectory reader =
        new CompoundFileDirectory(fsDir, "c.cfs", newIOContext(random()), false);
    for (int i = 0; i < FILE_COUNT; i++) {
        inputs[i] = reader.openInput("file." + i, newIOContext(random()));
    }
    for (int i = 0; i < FILE_COUNT; i++) {
        assertEquals((byte) i, inputs[i].readByte());
    }
    for (int i = 0; i < FILE_COUNT; i++) {
        inputs[i].close();
    }
    reader.close();
    fsDir.close();
}
private void checkHeaders(Directory dir) throws IOException { for (String file : dir.listAll()) { if (file.equals(IndexWriter.WRITE_LOCK_NAME)) { continue; // write.lock has no header, thats ok } if (file.equals(IndexFileNames.SEGMENTS_GEN)) { continue; // segments.gen has no header, thats ok } if (file.endsWith(IndexFileNames.COMPOUND_FILE_EXTENSION)) { CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, newIOContext(random()), false); checkHeaders(cfsDir); // recurse into cfs cfsDir.close(); } IndexInput in = null; boolean success = false; try { in = dir.openInput(file, newIOContext(random())); int val = in.readInt(); assertEquals(file + " has no codec header, instead found: " + val, CodecUtil.CODEC_MAGIC, val); success = true; } finally { if (success) { IOUtils.close(in); } else { IOUtils.closeWhileHandlingException(in); } } } }
/**
 * Stress-tests a compound file containing many (at least 500) one-byte
 * sub-files: all entries are opened simultaneously and each must read back
 * its own distinct byte.
 */
public void testManySubFiles() throws IOException {
    final Directory fsDir = newFSDirectory(_TestUtil.getTempDir("CFSManySubFiles"));
    final int FILE_COUNT = atLeast(500);

    // Each source file holds a single byte identifying its index.
    for (int i = 0; i < FILE_COUNT; i++) {
        IndexOutput out = fsDir.createOutput("file." + i, newIOContext(random()));
        out.writeByte((byte) i);
        out.close();
    }

    final CompoundFileDirectory writer =
        new CompoundFileDirectory(fsDir, "c.cfs", newIOContext(random()), true);
    for (int i = 0; i < FILE_COUNT; i++) {
        final String fileName = "file." + i;
        fsDir.copy(writer, fileName, fileName, newIOContext(random()));
    }
    writer.close();

    // Open every entry at once, then verify each byte.
    final IndexInput[] inputs = new IndexInput[FILE_COUNT];
    final CompoundFileDirectory reader =
        new CompoundFileDirectory(fsDir, "c.cfs", newIOContext(random()), false);
    for (int i = 0; i < FILE_COUNT; i++) {
        inputs[i] = reader.openInput("file." + i, newIOContext(random()));
    }
    for (int i = 0; i < FILE_COUNT; i++) {
        assertEquals((byte) i, inputs[i].readByte());
    }
    for (int i = 0; i < FILE_COUNT; i++) {
        inputs[i].close();
    }
    reader.close();
    fsDir.close();
}
/**
 * Opens the doc-values compound file {@code filename} for reading.
 *
 * @param state     segment read state supplying the directory and IO context
 * @param filename  name of the compound file holding the doc values
 * @param legacyKey attribute key used by this legacy (4.0) format
 *                  — presumably looked up elsewhere in this class; not used here
 * @throws IOException if the compound file cannot be opened
 */
Lucene40DocValuesReader(SegmentReadState state, String filename, String legacyKey) throws IOException {
    this.state = state;
    this.legacyKey = legacyKey;
    // false => open existing compound file for reading.
    this.dir = new CompoundFileDirectory(state.directory, filename, state.context, false);
    // Track heap usage starting from this object's shallow size.
    ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOf(getClass()));
}
/**
 * Creates the doc-values compound file {@code filename} for writing.
 *
 * @param state     segment write state supplying the directory and IO context
 * @param filename  name of the compound file to create
 * @param legacyKey attribute key used by this legacy (4.0) format
 *                  — presumably consumed elsewhere in this class; not used here
 * @throws IOException if the compound file cannot be created
 */
Lucene40DocValuesWriter(SegmentWriteState state, String filename, String legacyKey) throws IOException {
    this.state = state;
    this.legacyKey = legacyKey;
    // true => create a new compound file for writing.
    this.dir = new CompoundFileDirectory(state.directory, filename, state.context, true);
}