package com.liferay.portal.search.lucene;

import com.liferay.portal.kernel.dao.jdbc.DataAccess;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.search.SearchEngineUtil;
import com.liferay.portal.kernel.util.CharPool;
import com.liferay.portal.kernel.util.FileUtil;
import com.liferay.portal.kernel.util.InfrastructureUtil;
import com.liferay.portal.kernel.util.PortalClassLoaderUtil;
import com.liferay.portal.kernel.util.PropsKeys;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.util.PropsUtil;
import com.liferay.portal.util.PropsValues;

import java.io.File;
import java.io.IOException;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.Statement;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import javax.sql.DataSource;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.jdbc.JdbcDirectory;
import org.apache.lucene.store.jdbc.JdbcStoreException;
import org.apache.lucene.store.jdbc.dialect.Dialect;
import org.apache.lucene.store.jdbc.lock.JdbcLock;
import org.apache.lucene.store.jdbc.support.JdbcTemplate;
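
/**
 * Provides direct access to a single company's Lucene index. The backing
 * store is chosen from <code>PropsValues.LUCENE_STORE_TYPE</code> and is
 * either a file system directory, a JDBC-backed directory, or an in-memory
 * directory. Writes are batched and committed once
 * <code>PropsValues.LUCENE_COMMIT_BATCH_SIZE</code> operations have
 * accumulated or on a scheduled time interval.
 *
 * <p>
 * A minimal usage sketch; the "uid" field name and the values below are
 * illustrative only, not part of this class's contract:
 * </p>
 *
 * <pre>
 * IndexAccessorImpl indexAccessor = new IndexAccessorImpl(companyId);
 *
 * Document document = new Document();
 *
 * document.add(
 *     new Field("uid", uid, Field.Store.YES, Field.Index.NOT_ANALYZED));
 *
 * indexAccessor.addDocument(document);
 * indexAccessor.updateDocument(new Term("uid", uid), document);
 * indexAccessor.deleteDocuments(new Term("uid", uid));
 *
 * indexAccessor.close();
 * </pre>
 */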
public class IndexAccessorImpl implements IndexAccessor {

    public IndexAccessorImpl(long companyId) {
        _companyId = companyId;

        _initDialect();
        _checkLuceneDir();
        _initIndexWriter();
        _initCommitScheduler();
    }
    public void addDocument(Document document) throws IOException {
        if (SearchEngineUtil.isIndexReadOnly()) {
            return;
        }

        _write(null, document);
    }

    public void close() {
        try {
            _indexWriter.close();
        }
        catch (Exception e) {
            _log.error("Closing Lucene writer failed for " + _companyId, e);
        }
    }
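
    /**
     * Deletes the entire index for this company. The index writer is closed
     * first, the underlying store is cleared according to the configured
     * store type, and a fresh writer is then initialized.
     */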
    public void delete() {
        if (SearchEngineUtil.isIndexReadOnly()) {
            return;
        }

        close();

        if (_log.isDebugEnabled()) {
            _log.debug("Lucene store type " + PropsValues.LUCENE_STORE_TYPE);
        }

        if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_FILE)) {
            _deleteFile();
        }
        else if (PropsValues.LUCENE_STORE_TYPE.equals(
                    _LUCENE_STORE_TYPE_JDBC)) {

            _deleteJdbc();
        }
        else if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_RAM)) {
            _deleteRam();
        }
        else {
            throw new RuntimeException(
                "Invalid store type " + PropsValues.LUCENE_STORE_TYPE);
        }

        _initIndexWriter();
    }

    public void deleteDocuments(Term term) throws IOException {
        if (SearchEngineUtil.isIndexReadOnly()) {
            return;
        }

        try {
            _indexWriter.deleteDocuments(term);

            _batchCount++;
        }
        finally {
            _commit();
        }
    }

    public long getCompanyId() {
        return _companyId;
    }
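
    /**
     * Returns the Lucene directory that backs this company's index, as
     * selected by <code>PropsValues.LUCENE_STORE_TYPE</code>: "file", "jdbc",
     * or "ram". An unrecognized store type results in a
     * <code>RuntimeException</code>.
     */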
    public Directory getLuceneDir() {
        if (_log.isDebugEnabled()) {
            _log.debug("Lucene store type " + PropsValues.LUCENE_STORE_TYPE);
        }

        if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_FILE)) {
            return _getLuceneDirFile();
        }
        else if (PropsValues.LUCENE_STORE_TYPE.equals(
                    _LUCENE_STORE_TYPE_JDBC)) {

            return _getLuceneDirJdbc();
        }
        else if (PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_RAM)) {
            return _getLuceneDirRam();
        }
        else {
            throw new RuntimeException(
                "Invalid store type " + PropsValues.LUCENE_STORE_TYPE);
        }
    }

    public void updateDocument(Term term, Document document)
        throws IOException {

        if (SearchEngineUtil.isIndexReadOnly()) {
            return;
        }

        _write(term, document);
    }
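
    /**
     * Releases any existing write lock on the directory (for example, one
     * left behind by an unclean shutdown) so that a new
     * <code>IndexWriter</code> can be opened.
     */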
    private void _checkLuceneDir() {
        if (SearchEngineUtil.isIndexReadOnly()) {
            return;
        }

        try {
            Directory directory = getLuceneDir();

            if (IndexWriter.isLocked(directory)) {
                IndexWriter.unlock(directory);
            }
        }
        catch (Exception e) {
            _log.error("Check Lucene directory failed for " + _companyId, e);
        }
    }
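
    /**
     * Commits pending changes when batching is disabled (batch size of 0) or
     * when the number of buffered operations has reached
     * <code>PropsValues.LUCENE_COMMIT_BATCH_SIZE</code>.
     */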
    private void _commit() throws IOException {
        if ((PropsValues.LUCENE_COMMIT_BATCH_SIZE == 0) ||
            (PropsValues.LUCENE_COMMIT_BATCH_SIZE <= _batchCount)) {

            _doCommit();
        }
    }

    private void _deleteFile() {
        String path = _getPath();

        try {
            Directory directory = _getDirectory(path);

            directory.close();
        }
        catch (Exception e) {
            if (_log.isWarnEnabled()) {
                _log.warn("Could not close directory " + path);
            }
        }

        FileUtil.deltree(path);
    }
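
    /**
     * Closes the cached JDBC directory for this company, if any, and clears
     * all rows from its backing table.
     */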
    private void _deleteJdbc() {
        String tableName = _getTableName();

        try {
            Directory directory = _jdbcDirectories.remove(tableName);

            if (directory != null) {
                directory.close();
            }
        }
        catch (Exception e) {
            if (_log.isWarnEnabled()) {
                _log.warn("Could not close directory " + tableName);
            }
        }

        Connection con = null;
        Statement s = null;

        try {
            con = DataAccess.getConnection();

            s = con.createStatement();

            s.executeUpdate("DELETE FROM " + tableName);
        }
        catch (Exception e) {
            if (_log.isWarnEnabled()) {
                _log.warn("Could not truncate " + tableName);
            }
        }
        finally {
            DataAccess.cleanUp(con, s);
        }
    }

    private void _deleteRam() {
    }

    private void _doCommit() throws IOException {
        if (_indexWriter != null) {
            _indexWriter.commit();
        }

        _batchCount = 0;
    }

    private FSDirectory _getDirectory(String path) throws IOException {
        return FSDirectory.open(new File(path));
    }
    private Directory _getLuceneDirFile() {
        Directory directory = null;

        String path = _getPath();

        try {
            directory = _getDirectory(path);
        }
        catch (IOException ioe1) {
            _log.error("Could not open directory " + path, ioe1);

            if (directory != null) {
                try {
                    directory.close();
                }
                catch (Exception e) {
                }
            }
        }

        return directory;
    }
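
    /**
     * Returns the JDBC-backed directory for this company, creating and
     * caching it on first use. The thread's context class loader is
     * temporarily switched to the portal class loader while the directory is
     * created.
     */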
    private Directory _getLuceneDirJdbc() {
        JdbcDirectory jdbcDirectory = null;

        Thread currentThread = Thread.currentThread();

        ClassLoader contextClassLoader = currentThread.getContextClassLoader();

        try {
            currentThread.setContextClassLoader(
                PortalClassLoaderUtil.getClassLoader());

            String tableName = _getTableName();

            jdbcDirectory = (JdbcDirectory)_jdbcDirectories.get(tableName);

            if (jdbcDirectory != null) {
                return jdbcDirectory;
            }

            try {
                DataSource dataSource = InfrastructureUtil.getDataSource();

                jdbcDirectory = new JdbcDirectory(
                    dataSource, _dialect, tableName);

                _jdbcDirectories.put(tableName, jdbcDirectory);

                if (!jdbcDirectory.tableExists()) {
                    jdbcDirectory.create();
                }
            }
            catch (IOException ioe) {
                throw new RuntimeException(ioe);
            }
            catch (UnsupportedOperationException uoe) {
                if (_log.isWarnEnabled()) {
                    _log.warn(
                        "Database doesn't support the ability to check " +
                            "whether a table exists");
                }

                _manuallyCreateJdbcDirectory(jdbcDirectory, tableName);
            }
        }
        finally {
            currentThread.setContextClassLoader(contextClassLoader);
        }

        return jdbcDirectory;
    }

    private Directory _getLuceneDirRam() {
        String path = _getPath();

        Directory directory = _ramDirectories.get(path);

        if (directory == null) {
            directory = new RAMDirectory();

            _ramDirectories.put(path, directory);
        }

        return directory;
    }

    private String _getPath() {
        return PropsValues.LUCENE_DIR.concat(String.valueOf(_companyId)).concat(
            StringPool.SLASH);
    }

    private String _getTableName() {
        return _LUCENE_TABLE_PREFIX + _companyId;
    }
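
    /**
     * Starts a single-threaded scheduler that commits pending changes every
     * <code>PropsValues.LUCENE_COMMIT_TIME_INTERVAL</code> milliseconds.
     * Skipped when either the batch size or the time interval is not
     * positive.
     */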
    private void _initCommitScheduler() {
        if ((PropsValues.LUCENE_COMMIT_BATCH_SIZE <= 0) ||
            (PropsValues.LUCENE_COMMIT_TIME_INTERVAL <= 0)) {

            return;
        }

        ScheduledExecutorService scheduledExecutorService =
            Executors.newSingleThreadScheduledExecutor();

        Runnable runnable = new Runnable() {

            public void run() {
                try {
                    _doCommit();
                }
                catch (IOException ioe) {
                    _log.error("Could not run scheduled commit", ioe);
                }
            }

        };

        scheduledExecutorService.scheduleWithFixedDelay(
            runnable, 0, PropsValues.LUCENE_COMMIT_TIME_INTERVAL,
            TimeUnit.MILLISECONDS);
    }
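
    /**
     * Determines the Lucene JDBC store dialect from the connection URL. The
     * subprotocol between the first two colons (for example, "mysql" in a URL
     * like "jdbc:mysql://...") is appended to
     * <code>PropsKeys.LUCENE_STORE_JDBC_DIALECT</code> to look up the dialect
     * class name. Only used for the "jdbc" store type.
     */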
    private void _initDialect() {
        if (!PropsValues.LUCENE_STORE_TYPE.equals(_LUCENE_STORE_TYPE_JDBC)) {
            return;
        }

        Connection con = null;

        try {
            con = DataAccess.getConnection();

            String url = con.getMetaData().getURL();

            int x = url.indexOf(CharPool.COLON);
            int y = url.indexOf(CharPool.COLON, x + 1);

            String urlPrefix = url.substring(x + 1, y);

            String dialectClass = PropsUtil.get(
                PropsKeys.LUCENE_STORE_JDBC_DIALECT + urlPrefix);

            if (dialectClass != null) {
                if (_log.isDebugEnabled()) {
                    _log.debug("JDBC class implementation " + dialectClass);
                }

                _dialect = (Dialect)Class.forName(dialectClass).newInstance();
            }
            else if (_log.isDebugEnabled()) {
                _log.debug("JDBC class implementation is null");
            }
        }
        catch (Exception e) {
            _log.error(e);
        }
        finally {
            DataAccess.cleanUp(con);
        }

        if (_dialect == null) {
            _log.error("No JDBC dialect found");
        }
    }
    private void _initIndexWriter() {
        try {
            _indexWriter = new IndexWriter(
                getLuceneDir(), LuceneHelperUtil.getAnalyzer(),
                IndexWriter.MaxFieldLength.LIMITED);

            _indexWriter.setMergeFactor(PropsValues.LUCENE_MERGE_FACTOR);
            _indexWriter.setRAMBufferSizeMB(PropsValues.LUCENE_BUFFER_SIZE);
        }
        catch (Exception e) {
            _log.error(
                "Initializing Lucene writer failed for " + _companyId, e);
        }
    }
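
    /**
     * Fallback used when the dialect cannot report whether the Lucene table
     * exists: checks the database metadata directly and, if the table is
     * missing, creates it and initializes the configured JDBC lock.
     */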
    private void _manuallyCreateJdbcDirectory(
        JdbcDirectory jdbcDirectory, String tableName) {

        Connection con = null;
        ResultSet rs = null;

        try {
            con = DataAccess.getConnection();

            DatabaseMetaData metaData = con.getMetaData();

            rs = metaData.getTables(null, null, tableName, null);

            if (!rs.next()) {
                JdbcTemplate jdbcTemplate = jdbcDirectory.getJdbcTemplate();

                jdbcTemplate.executeUpdate(
                    jdbcDirectory.getTable().sqlCreate());

                Class<?> lockClass = jdbcDirectory.getSettings().getLockClass();

                JdbcLock jdbcLock = null;

                try {
                    jdbcLock = (JdbcLock)lockClass.newInstance();
                }
                catch (Exception e) {
                    throw new JdbcStoreException(
                        "Could not create lock class " + lockClass);
                }

                jdbcLock.initializeDatabase(jdbcDirectory);
            }
        }
        catch (Exception e) {
            if (_log.isWarnEnabled()) {
                _log.warn("Could not create " + tableName);
            }
        }
        finally {
            DataAccess.cleanUp(con, null, rs);
        }
    }
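
    /**
     * Adds the document, or updates it when a term is given, then optimizes
     * the index once <code>PropsValues.LUCENE_OPTIMIZE_INTERVAL</code> writes
     * have accumulated (or on every write when the interval is 0). A commit
     * is always attempted afterwards, subject to the batch size setting.
     */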
    private void _write(Term term, Document document) throws IOException {
        try {
            if (term != null) {
                _indexWriter.updateDocument(term, document);
            }
            else {
                _indexWriter.addDocument(document);
            }

            _optimizeCount++;

            if ((PropsValues.LUCENE_OPTIMIZE_INTERVAL == 0) ||
                (_optimizeCount >= PropsValues.LUCENE_OPTIMIZE_INTERVAL)) {

                _indexWriter.optimize();

                _optimizeCount = 0;
            }

            _batchCount++;
        }
        finally {
            _commit();
        }
    }
    private static final String _LUCENE_STORE_TYPE_FILE = "file";

    private static final String _LUCENE_STORE_TYPE_JDBC = "jdbc";

    private static final String _LUCENE_STORE_TYPE_RAM = "ram";

    private static final String _LUCENE_TABLE_PREFIX = "LUCENE_";

    private static Log _log = LogFactoryUtil.getLog(IndexAccessorImpl.class);

    private int _batchCount;
    private long _companyId;
    private Dialect _dialect;
    private IndexWriter _indexWriter;
    private Map<String, Directory> _jdbcDirectories =
        new ConcurrentHashMap<String, Directory>();
    private int _optimizeCount;
    private Map<String, Directory> _ramDirectories =
        new ConcurrentHashMap<String, Directory>();

}