package com.liferay.portal.search.lucene;

import com.liferay.portal.kernel.dao.jdbc.DataAccess;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.search.SearchEngineUtil;
import com.liferay.portal.kernel.util.FileUtil;
import com.liferay.portal.kernel.util.InfrastructureUtil;
import com.liferay.portal.kernel.util.PortalClassLoaderUtil;
import com.liferay.portal.kernel.util.PropsKeys;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.util.PropsUtil;
import com.liferay.portal.util.PropsValues;

import java.io.File;
import java.io.IOException;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.Statement;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import javax.sql.DataSource;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.jdbc.JdbcDirectory;
import org.apache.lucene.store.jdbc.JdbcStoreException;
import org.apache.lucene.store.jdbc.dialect.Dialect;
import org.apache.lucene.store.jdbc.lock.JdbcLock;
import org.apache.lucene.store.jdbc.support.JdbcTemplate;

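/**
 * Provides read-write access to a single company's Lucene index. The backing
 * store is selected by the <code>lucene.store.type</code> portal property and
 * may be a file system directory, a JDBC table, or an in-memory directory.
 */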
public class IndexAccessorImpl implements IndexAccessor {

	public IndexAccessorImpl(long companyId) {
		_companyId = companyId;

		// The JDBC dialect must be resolved before any directory or writer
		// is created, because the JDBC store needs it to build its directory

		_initDialect();
		_checkLuceneDir();
		_initIndexWriter();
		_initCommitScheduler();
	}

	public void addDocument(Document document) throws IOException {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		_write(null, document);
	}

	public void close() {
		try {
			_indexWriter.close();
		}
		catch (Exception e) {
			_log.error("Closing Lucene writer failed for " + _companyId, e);
		}
	}

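	/**
	 * Wipes the company's index from the configured store, then re-creates
	 * the index writer so this accessor can keep serving writes.
	 */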
	public void delete() {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		close();

		if (_log.isDebugEnabled()) {
			_log.debug("Lucene store type " + PropsValues.LUCENE_STORE_TYPE);
		}

		if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_FILE)) {
			_deleteFile();
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(
					_LUCENE_STORE_TYPE_JDBC)) {

			_deleteJdbc();
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_RAM)) {
			_deleteRam();
		}
		else {
			throw new RuntimeException(
				"Invalid store type " + PropsValues.LUCENE_STORE_TYPE);
		}

		_initIndexWriter();
	}

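	/**
	 * Deletes all documents matching the given term. The deletion counts
	 * toward the commit batch, and _commit() runs even if the delete throws.
	 */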
	public void deleteDocuments(Term term) throws IOException {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		try {
			_indexWriter.deleteDocuments(term);

			_batchCount++;
		}
		finally {
			_commit();
		}
	}

	public long getCompanyId() {
		return _companyId;
	}

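	/**
	 * Returns the Lucene directory for this company, dispatching on the
	 * lucene.store.type portal property (file, jdbc, or ram).
	 */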
	public Directory getLuceneDir() {
		if (_log.isDebugEnabled()) {
			_log.debug("Lucene store type " + PropsValues.LUCENE_STORE_TYPE);
		}

		if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_FILE)) {
			return _getLuceneDirFile();
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(
					_LUCENE_STORE_TYPE_JDBC)) {

			return _getLuceneDirJdbc();
		}
		else if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_RAM)) {
			return _getLuceneDirRam();
		}
		else {
			throw new RuntimeException(
				"Invalid store type " + PropsValues.LUCENE_STORE_TYPE);
		}
	}

	public void updateDocument(Term term, Document document)
		throws IOException {

		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		_write(term, document);
	}

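	/**
	 * Releases a stale write lock left on the directory, e.g. by a previous
	 * JVM that crashed while holding the Lucene index writer.
	 */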
	private void _checkLuceneDir() {
		if (SearchEngineUtil.isIndexReadOnly()) {
			return;
		}

		try {
			Directory directory = getLuceneDir();

			if (IndexWriter.isLocked(directory)) {
				IndexWriter.unlock(directory);
			}
		}
		catch (Exception e) {
			_log.error("Check Lucene directory failed for " + _companyId, e);
		}
	}

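	/**
	 * Commits when the batch threshold is reached. A batch size of 0 means
	 * every write is committed immediately.
	 */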
	private void _commit() throws IOException {
		if ((PropsValues.LUCENE_COMMIT_BATCH_SIZE == 0) ||
			(PropsValues.LUCENE_COMMIT_BATCH_SIZE <= _batchCount)) {

			_doCommit();
		}
	}

	private void _deleteFile() {
		String path = _getPath();

		try {
			Directory directory = _getDirectory(path);

			directory.close();
		}
		catch (Exception e) {
			if (_log.isWarnEnabled()) {
				_log.warn("Could not close directory " + path, e);
			}
		}

		FileUtil.deltree(path);
	}

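	/**
	 * Closes the cached JDBC directory for this company and empties its
	 * backing table. The table itself is kept so it can be reused.
	 */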
	private void _deleteJdbc() {
		String tableName = _getTableName();

		try {
			Directory directory = _jdbcDirectories.remove(tableName);

			if (directory != null) {
				directory.close();
			}
		}
		catch (Exception e) {
			if (_log.isWarnEnabled()) {
				_log.warn("Could not close directory " + tableName, e);
			}
		}

		Connection con = null;
		Statement s = null;

		try {
			con = DataAccess.getConnection();

			s = con.createStatement();

			s.executeUpdate("DELETE FROM " + tableName);
		}
		catch (Exception e) {
			if (_log.isWarnEnabled()) {
				_log.warn("Could not truncate " + tableName, e);
			}
		}
		finally {
			DataAccess.cleanUp(con, s);
		}
	}

	private void _deleteRam() {

		// In-memory indexes have no persistent artifacts to remove

	}

	private void _doCommit() throws IOException {
		if (_indexWriter != null) {
			_indexWriter.commit();
		}

		_batchCount = 0;
	}

	private FSDirectory _getDirectory(String path) throws IOException {
		return FSDirectory.open(new File(path));
	}

	private Directory _getLuceneDirFile() {
		Directory directory = null;

		String path = _getPath();

		try {
			directory = _getDirectory(path);
		}
		catch (IOException ioe) {
			_log.error("Could not open directory " + path, ioe);
		}

		return directory;
	}

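	/**
	 * Returns (and lazily creates) the JDBC-backed directory for this
	 * company. The thread's context class loader is swapped to the portal
	 * class loader so the Lucene JDBC store can load its classes, and
	 * directories are cached per table name.
	 */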
	private Directory _getLuceneDirJdbc() {
		JdbcDirectory jdbcDirectory = null;

		Thread currentThread = Thread.currentThread();

		ClassLoader contextClassLoader = currentThread.getContextClassLoader();

		try {
			currentThread.setContextClassLoader(
				PortalClassLoaderUtil.getClassLoader());

			String tableName = _getTableName();

			jdbcDirectory = (JdbcDirectory)_jdbcDirectories.get(tableName);

			if (jdbcDirectory != null) {
				return jdbcDirectory;
			}

			try {
				DataSource dataSource = InfrastructureUtil.getDataSource();

				jdbcDirectory = new JdbcDirectory(
					dataSource, _dialect, tableName);

				_jdbcDirectories.put(tableName, jdbcDirectory);

				if (!jdbcDirectory.tableExists()) {
					jdbcDirectory.create();
				}
			}
			catch (IOException ioe) {
				throw new RuntimeException(ioe);
			}
			catch (UnsupportedOperationException uoe) {
				if (_log.isWarnEnabled()) {
					_log.warn(
						"Database does not support checking whether a " +
							"table exists");
				}

				_manuallyCreateJdbcDirectory(jdbcDirectory, tableName);
			}
		}
		finally {
			currentThread.setContextClassLoader(contextClassLoader);
		}

		return jdbcDirectory;
	}

	private Directory _getLuceneDirRam() {
		String path = _getPath();

		Directory directory = _ramDirectories.get(path);

		if (directory == null) {
			directory = new RAMDirectory();

			_ramDirectories.put(path, directory);
		}

		return directory;
	}

	private String _getPath() {
		return PropsValues.LUCENE_DIR.concat(String.valueOf(_companyId)).concat(
			StringPool.SLASH);
	}

	private String _getTableName() {
		return _LUCENE_TABLE_PREFIX + _companyId;
	}

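	/**
	 * Schedules periodic commits so batched writes still reach the index
	 * within lucene.commit.time.interval milliseconds, even when the batch
	 * size threshold is never hit. Disabled when either property is not
	 * positive.
	 */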
	private void _initCommitScheduler() {
		if ((PropsValues.LUCENE_COMMIT_BATCH_SIZE <= 0) ||
			(PropsValues.LUCENE_COMMIT_TIME_INTERVAL <= 0)) {

			return;
		}

		ScheduledExecutorService scheduledExecutorService =
			Executors.newSingleThreadScheduledExecutor();

		Runnable runnable = new Runnable() {

			public void run() {
				try {
					_doCommit();
				}
				catch (IOException ioe) {
					_log.error("Could not run scheduled commit", ioe);
				}
			}

		};

		scheduledExecutorService.scheduleWithFixedDelay(
			runnable, 0, PropsValues.LUCENE_COMMIT_TIME_INTERVAL,
			TimeUnit.MILLISECONDS);
	}

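	/**
	 * Resolves the Lucene JDBC store dialect from the connection URL's
	 * subprotocol. For example, a URL of jdbc:mysql://localhost/lportal
	 * yields the key "mysql", which is looked up against
	 * lucene.store.jdbc.dialect.mysql in portal.properties.
	 */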
	private void _initDialect() {
		if (!PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_JDBC)) {
			return;
		}

		Connection con = null;

		try {
			con = DataAccess.getConnection();

			String url = con.getMetaData().getURL();

			int x = url.indexOf(StringPool.COLON);
			int y = url.indexOf(StringPool.COLON, x + 1);

			String urlPrefix = url.substring(x + 1, y);

			String dialectClass = PropsUtil.get(
				PropsKeys.LUCENE_STORE_JDBC_DIALECT + urlPrefix);

			if (_log.isDebugEnabled()) {
				if (dialectClass != null) {
					_log.debug("JDBC dialect implementation " + dialectClass);
				}
				else {
					_log.debug("JDBC dialect implementation is null");
				}
			}

			if (dialectClass != null) {
				_dialect = (Dialect)Class.forName(dialectClass).newInstance();
			}
		}
		catch (Exception e) {
			_log.error(e, e);
		}
		finally {
			DataAccess.cleanUp(con);
		}

		if (_dialect == null) {
			_log.error("No JDBC dialect found");
		}
	}

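	/**
	 * Opens the index writer against the configured directory, tuning the
	 * merge factor and RAM buffer from the lucene.merge.factor and
	 * lucene.buffer.size portal properties.
	 */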
	private void _initIndexWriter() {
		try {
			_indexWriter = new IndexWriter(
				getLuceneDir(), LuceneHelperUtil.getAnalyzer(),
				IndexWriter.MaxFieldLength.LIMITED);

			_indexWriter.setMergeFactor(PropsValues.LUCENE_MERGE_FACTOR);
			_indexWriter.setRAMBufferSizeMB(PropsValues.LUCENE_BUFFER_SIZE);
		}
		catch (Exception e) {
			_log.error(
				"Initializing Lucene writer failed for " + _companyId, e);
		}
	}

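	/**
	 * Fallback for databases whose driver cannot report whether a table
	 * exists: checks the table via DatabaseMetaData and, if absent, creates
	 * it by hand and initializes the JDBC lock support.
	 */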
	private void _manuallyCreateJdbcDirectory(
		JdbcDirectory jdbcDirectory, String tableName) {

		Connection con = null;
		ResultSet rs = null;

		try {
			con = DataAccess.getConnection();

			DatabaseMetaData metaData = con.getMetaData();

			rs = metaData.getTables(null, null, tableName, null);

			if (!rs.next()) {
				JdbcTemplate jdbcTemplate = jdbcDirectory.getJdbcTemplate();

				jdbcTemplate.executeUpdate(
					jdbcDirectory.getTable().sqlCreate());

				Class<?> lockClass = jdbcDirectory.getSettings().getLockClass();

				JdbcLock jdbcLock = null;

				try {
					jdbcLock = (JdbcLock)lockClass.newInstance();
				}
				catch (Exception e) {
					throw new JdbcStoreException(
						"Could not create lock class " + lockClass);
				}

				jdbcLock.initializeDatabase(jdbcDirectory);
			}
		}
		catch (Exception e) {
			if (_log.isWarnEnabled()) {
				_log.warn("Could not create " + tableName, e);
			}
		}
		finally {
			DataAccess.cleanUp(con, null, rs);
		}
	}

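	/**
	 * Adds or updates a document, optimizing the index every
	 * lucene.optimize.interval writes (or on every write when the interval
	 * is 0), and committing through the batch policy in _commit().
	 */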
	private void _write(Term term, Document document) throws IOException {
		try {
			if (term != null) {
				_indexWriter.updateDocument(term, document);
			}
			else {
				_indexWriter.addDocument(document);
			}

			_optimizeCount++;

			if ((PropsValues.LUCENE_OPTIMIZE_INTERVAL == 0) ||
				(_optimizeCount >= PropsValues.LUCENE_OPTIMIZE_INTERVAL)) {

				_indexWriter.optimize();

				_optimizeCount = 0;
			}

			_batchCount++;
		}
		finally {
			_commit();
		}
	}

	private static final String _LUCENE_STORE_TYPE_FILE = "file";

	private static final String _LUCENE_STORE_TYPE_JDBC = "jdbc";

	private static final String _LUCENE_STORE_TYPE_RAM = "ram";

	private static final String _LUCENE_TABLE_PREFIX = "LUCENE_";

	private static Log _log = LogFactoryUtil.getLog(IndexAccessorImpl.class);

	private int _batchCount;
	private long _companyId;
	private Dialect _dialect;
	private IndexWriter _indexWriter;
	private Map<String, Directory> _jdbcDirectories =
		new ConcurrentHashMap<String, Directory>();
	private int _optimizeCount;
	private Map<String, Directory> _ramDirectories =
		new ConcurrentHashMap<String, Directory>();

}