package com.liferay.portal.search.lucene;

import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.search.SearchEngineUtil;
import com.liferay.portal.kernel.util.FileUtil;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.search.lucene.dump.DumpIndexDeletionPolicy;
import com.liferay.portal.search.lucene.dump.IndexCommitSerializationUtil;
import com.liferay.portal.util.PropsValues;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.RAMDirectory;

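/**
 * Provides access to the Lucene index of a single company: it writes and
 * deletes documents through a shared IndexWriter, commits changes in
 * configurable batches, and supports dumping and loading the index for
 * replication across nodes.
 */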
public class IndexAccessorImpl implements IndexAccessor {

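	/**
	 * Clears any stale write lock left over from an unclean shutdown,
	 * creates the index writer, and starts the scheduled commit thread.
	 */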
	public IndexAccessorImpl(long companyId) {
		_companyId = companyId;

		_checkLuceneDir();
		_initIndexWriter();
		_initCommitScheduler();
	}

	public void addDocument(Document document) throws IOException {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		_write(null, document);
	}

	public void close() {
		try {
			_indexWriter.close();
		}
		catch (Exception e) {
			_log.error("Closing Lucene writer failed for " + _companyId, e);
		}
	}

	public void delete() {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		close();

		_deleteDirectory();

		_initIndexWriter();
	}

	public void deleteDocuments(Term term) throws IOException {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		try {
			_indexWriter.deleteDocuments(term);

			_batchCount++;
		}
		finally {
			_commit();
		}
	}

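	/**
	 * Serializes the current index commit to the output stream. The commit
	 * lock is shared with _doCommit() so the dump never observes a
	 * half-written commit.
	 */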
	public void dumpIndex(OutputStream outputStream) throws IOException {
		_dumpIndexDeletionPolicy.dump(outputStream, _indexWriter, _commitLock);
	}

	public void enableDumpIndex() {
		_countDownLatch.countDown();
	}

	public long getCompanyId() {
		return _companyId;
	}

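	/**
	 * Returns DEFAULT_LAST_GENERATION until enableDumpIndex() releases the
	 * latch, signaling that this index is not yet ready to be dumped.
	 */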
	public long getLastGeneration() {
		if (_countDownLatch.getCount() > 0) {
			return DEFAULT_LAST_GENERATION;
		}

		return _dumpIndexDeletionPolicy.getLastGeneration();
	}

	public Directory getLuceneDir() {
		if (_log.isDebugEnabled()) {
			_log.debug("Lucene store type " + PropsValues.LUCENE_STORE_TYPE);
		}

		if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_FILE)) {
			return _getLuceneDirFile();
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(
					_LUCENE_STORE_TYPE_JDBC)) {

			throw new IllegalArgumentException(
				"Store type JDBC is no longer supported in favor of SOLR");
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_RAM)) {
			return _getLuceneDirRam();
		}
		else {
			throw new RuntimeException(
				"Invalid store type " + PropsValues.LUCENE_STORE_TYPE);
		}
	}

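	/**
	 * Replaces this index with one streamed from another node: the dump is
	 * deserialized into a temporary directory, the live index is deleted,
	 * and the temporary contents are copied in before the writer is
	 * reopened.
	 */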
	public void loadIndex(InputStream inputStream) throws IOException {
		File tempFile = FileUtil.createTempFile();

		Directory tempDirectory = FSDirectory.open(tempFile);

		IndexCommitSerializationUtil.deserializeIndex(
			inputStream, tempDirectory);

		close();

		_deleteDirectory();

		Directory.copy(tempDirectory, getLuceneDir(), true);

		_initIndexWriter();

		tempDirectory.close();

		FileUtil.deltree(tempFile);
	}

	public void updateDocument(Term term, Document document)
		throws IOException {

		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		if (_log.isDebugEnabled()) {
			_log.debug("Indexing " + document);
		}

		_write(term, document);
	}

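	/**
	 * Releases a stale Lucene write lock, which can be left behind when a
	 * previous process exited without closing its IndexWriter.
	 */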
	private void _checkLuceneDir() {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		try {
			Directory directory = getLuceneDir();

			if (IndexWriter.isLocked(directory)) {
				IndexWriter.unlock(directory);
			}
		}
		catch (Exception e) {
			_log.error("Check Lucene directory failed for " + _companyId, e);
		}
	}

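	/**
	 * Commits when batching is disabled (a batch size of 0 commits on every
	 * change) or when the pending change count has reached the configured
	 * batch size.
	 */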
	private void _commit() throws IOException {
		if ((PropsValues.LUCENE_COMMIT_BATCH_SIZE == 0) ||
			(PropsValues.LUCENE_COMMIT_BATCH_SIZE <= _batchCount)) {

			_doCommit();
		}
	}

	private void _deleteDirectory() {
		if (_log.isDebugEnabled()) {
			_log.debug("Lucene store type " + PropsValues.LUCENE_STORE_TYPE);
		}

		if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_FILE)) {
			_deleteFile();
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(
					_LUCENE_STORE_TYPE_JDBC)) {

			throw new IllegalArgumentException(
				"Store type JDBC is no longer supported in favor of SOLR");
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_RAM)) {
			_deleteRam();
		}
		else {
			throw new RuntimeException(
				"Invalid store type " + PropsValues.LUCENE_STORE_TYPE);
		}
	}

	private void _deleteFile() {
		String path = _getPath();

		try {
			Directory directory = _getDirectory(path);

			directory.close();
		}
		catch (Exception e) {
			if (_log.isWarnEnabled()) {
				_log.warn("Could not close directory " + path, e);
			}
		}

		FileUtil.deltree(path);
	}

	private void _deleteRam() {

		// Intentionally empty. The RAMDirectory is kept in _ramDirectories
		// and reused when the index writer is recreated.

	}

	private void _doCommit() throws IOException {
		if (_indexWriter != null) {
			_commitLock.lock();

			try {

				// The commit lock is shared with dumpIndex() so that a dump
				// always sees a complete commit

				_indexWriter.commit();
			}
			finally {
				_commitLock.unlock();
			}
		}

		_batchCount = 0;
	}

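	/**
	 * Opens the file system directory, using MMapDirectory when memory
	 * mapping is forced by configuration and otherwise letting Lucene pick
	 * the best FSDirectory implementation for the platform.
	 */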
	private FSDirectory _getDirectory(String path) throws IOException {
		if (PropsValues.LUCENE_STORE_TYPE_FILE_FORCE_MMAP) {
			return new MMapDirectory(new File(path));
		}
		else {
			return FSDirectory.open(new File(path));
		}
	}

	private Directory _getLuceneDirFile() {
		Directory directory = null;

		String path = _getPath();

		try {
			directory = _getDirectory(path);
		}
		catch (IOException ioe) {
			_log.error("Unable to open Lucene directory at " + path, ioe);
		}

		return directory;
	}

	private Directory _getLuceneDirRam() {
		String path = _getPath();

		Directory directory = _ramDirectories.get(path);

		if (directory == null) {
			directory = new RAMDirectory();

			_ramDirectories.put(path, directory);
		}

		return directory;
	}

	private String _getPath() {
		return PropsValues.LUCENE_DIR.concat(String.valueOf(_companyId)).concat(
			StringPool.SLASH);
	}

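	/**
	 * Starts a single-threaded scheduler that commits pending changes at a
	 * fixed interval, so batched changes are persisted even when the batch
	 * size is never reached. Skipped when batched or time-based commits are
	 * disabled.
	 */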
	private void _initCommitScheduler() {
		if ((PropsValues.LUCENE_COMMIT_BATCH_SIZE <= 0) ||
			(PropsValues.LUCENE_COMMIT_TIME_INTERVAL <= 0)) {

			return;
		}

		ScheduledExecutorService scheduledExecutorService =
			Executors.newSingleThreadScheduledExecutor();

		Runnable runnable = new Runnable() {

			public void run() {
				try {
					if (_batchCount > 0) {
						_doCommit();
					}
				}
				catch (IOException ioe) {
					_log.error("Could not run scheduled commit", ioe);
				}
			}

		};

		scheduledExecutorService.scheduleWithFixedDelay(
			runnable, 0, PropsValues.LUCENE_COMMIT_TIME_INTERVAL,
			TimeUnit.MILLISECONDS);
	}

	private void _initIndexWriter() {
		try {
			_indexWriter = new IndexWriter(
				getLuceneDir(), LuceneHelperUtil.getAnalyzer(),
				_dumpIndexDeletionPolicy, IndexWriter.MaxFieldLength.LIMITED);

			_indexWriter.setMergeFactor(PropsValues.LUCENE_MERGE_FACTOR);
			_indexWriter.setRAMBufferSizeMB(PropsValues.LUCENE_BUFFER_SIZE);
		}
		catch (Exception e) {
			_log.error(
				"Initializing Lucene writer failed for " + _companyId, e);
		}
	}

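	/**
	 * Adds the document, or updates it in place when a term is given, then
	 * optimizes the index every LUCENE_OPTIMIZE_INTERVAL writes (on every
	 * write when the interval is 0) and commits through the batching logic.
	 */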
	private void _write(Term term, Document document) throws IOException {
		try {
			if (term != null) {
				_indexWriter.updateDocument(term, document);
			}
			else {
				_indexWriter.addDocument(document);
			}

			_optimizeCount++;

			if ((PropsValues.LUCENE_OPTIMIZE_INTERVAL == 0) ||
				(_optimizeCount >= PropsValues.LUCENE_OPTIMIZE_INTERVAL)) {

				_indexWriter.optimize();

				_optimizeCount = 0;
			}

			_batchCount++;
		}
		finally {
			_commit();
		}
	}

	private static final String _LUCENE_STORE_TYPE_FILE = "file";

	private static final String _LUCENE_STORE_TYPE_JDBC = "jdbc";

	private static final String _LUCENE_STORE_TYPE_RAM = "ram";

	private static Log _log = LogFactoryUtil.getLog(IndexAccessorImpl.class);

	private volatile int _batchCount;
	private Lock _commitLock = new ReentrantLock();
	private long _companyId;
	private CountDownLatch _countDownLatch = new CountDownLatch(1);
	private DumpIndexDeletionPolicy _dumpIndexDeletionPolicy =
		new DumpIndexDeletionPolicy();
	private IndexWriter _indexWriter;
	private int _optimizeCount;
	private Map<String, Directory> _ramDirectories =
		new ConcurrentHashMap<String, Directory>();

}