/* This file is (c) 2008-2012 Konstantin Isakov <ikm@goldendict.org>
 * Part of GoldenDict. Licensed under GPLv3 or later, see the LICENSE file */

#include "sdict.hh"
#include "btreeidx.hh"
#include "folding.hh"
#include "utf8.hh"
#include "chunkedstorage.hh"
#include "langcoder.hh"
#include "gddebug.hh"
#include "decompress.hh"
#include "htmlescape.hh"
#include "ftshelpers.hh"

#include <map>
#include <set>
#include <string>

#ifdef _MSC_VER
#include <stub_msvc.h>
#endif

#include <QString>
#include <QSemaphore>
#include <QAtomicInt>

#if ( QT_VERSION >= QT_VERSION_CHECK( 6, 0, 0 ) )
#include <QtCore5Compat>
#endif

#include <QRegularExpression>

#include "utils.hh"

namespace Sdict {

using std::map;
using std::multimap;
using std::pair;
using std::set;
using std::string;
using gd::wstring;

using BtreeIndexing::WordArticleLink;
using BtreeIndexing::IndexedWords;
using BtreeIndexing::IndexInfo;

namespace {

DEF_EX_STR( exNotDctFile, "Not an Sdictionary file", Dictionary::Ex )
using Dictionary::exCantReadFile;
DEF_EX_STR( exWordIsTooLarge, "Encountered a word that is too large:", Dictionary::Ex )
DEF_EX_STR( exSuddenEndOfFile, "Sudden end of file", Dictionary::Ex )

#pragma pack( push, 1 )

/// DCT file header
struct DCT_header
{
  char signature[ 4 ];
  char inputLang[ 3 ];
  char outputLang[ 3 ];
  uint8_t compression;
  uint32_t wordCount;
  uint32_t shortIndexLength;
  uint32_t titleOffset;
  uint32_t copyrightOffset;
  uint32_t versionOffset;
  uint32_t shortIndexOffset;
  uint32_t fullIndexOffset;
  uint32_t articlesOffset;
}
#ifndef _MSC_VER
__attribute__( ( packed ) )
#endif
;
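
// Note on the .dct layout as read by the code below: the title, copyright and
// version blocks referenced by the offsets above are each stored as a 32-bit
// size followed by that many bytes of data, compressed according to the low
// nibble of the compression byte. The full index at fullIndexOffset is a chain
// of IndexElement records, and article bodies live at articlesOffset.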

struct IndexElement
{
  uint16_t nextWord;
  uint16_t previousWord;
  uint32_t articleOffset;
}
#ifndef _MSC_VER
__attribute__( ( packed ) )
#endif
;
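
// As used in makeDictionaries() below, nextWord holds the distance in bytes
// from the start of this record to the next one, so the UTF-8 headword that
// follows the record occupies nextWord - sizeof( IndexElement ) bytes.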

enum {
  Signature            = 0x43494453, // SDIC on little-endian, CIDS on big-endian
  CurrentFormatVersion = 1 + BtreeIndexing::FormatVersion + Folding::Version
};

struct IdxHeader
{
  uint32_t signature;             // First comes the signature, SDIC
  uint32_t formatVersion;         // File format version (CurrentFormatVersion)
  uint32_t chunksOffset;          // The offset to chunks' storage
  uint32_t indexBtreeMaxElements; // Two fields from IndexInfo
  uint32_t indexRootOffset;
  uint32_t wordCount;
  uint32_t articleCount;
  uint32_t compressionType; // Data compression in file. 0 - no compression, 1 - zip, 2 - bzip2
  uint32_t langFrom;        // Source language
  uint32_t langTo;          // Target language
}
#ifndef _MSC_VER
__attribute__( ( packed ) )
#endif
;
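
// The .idx file written by makeDictionaries() starts with this header, followed
// by the length-prefixed dictionary name, the chunked article-offset storage and
// the btree word index; chunksOffset, indexBtreeMaxElements and indexRootOffset
// record where those parts live.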

#pragma pack( pop )

bool indexIsOldOrBad( string const & indexFile )
{
  File::Class idx( indexFile, "rb" );

  IdxHeader header;

  return idx.readRecords( &header, sizeof( header ), 1 ) != 1 || header.signature != Signature
    || header.formatVersion != CurrentFormatVersion;
}

class SdictDictionary: public BtreeIndexing::BtreeDictionary
{
  QMutex idxMutex, sdictMutex;
  File::Class idx;
  IdxHeader idxHeader;
  ChunkedStorage::Reader chunks;
  File::Class df;

public:

  SdictDictionary( string const & id, string const & indexFile, vector< string > const & dictionaryFiles );

  ~SdictDictionary();

  string getName() noexcept override
  {
    return dictionaryName;
  }

  map< Dictionary::Property, string > getProperties() noexcept override
  {
    return map< Dictionary::Property, string >();
  }

  unsigned long getArticleCount() noexcept override
  {
    return idxHeader.articleCount;
  }

  unsigned long getWordCount() noexcept override
  {
    return idxHeader.wordCount;
  }

  inline quint32 getLangFrom() const override
  {
    return idxHeader.langFrom;
  }

  inline quint32 getLangTo() const override
  {
    return idxHeader.langTo;
  }

  sptr< Dictionary::DataRequest >
  getArticle( wstring const &, vector< wstring > const & alts, wstring const &, bool ignoreDiacritics ) override;

  QString const & getDescription() override;

  sptr< Dictionary::DataRequest >
  getSearchResults( QString const & searchString, int searchMode, bool matchCase, bool ignoreDiacritics ) override;

  void getArticleText( uint32_t articleAddress, QString & headword, QString & text ) override;

  void makeFTSIndex( QAtomicInt & isCancelled, bool firstIteration ) override;

  void setFTSParameters( Config::FullTextSearch const & fts ) override
  {
    can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "SDICT", Qt::CaseInsensitive )
      && ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
  }

protected:

  void loadIcon() noexcept override;

private:

  /// Loads the article.
  void loadArticle( uint32_t address, string & articleText );
  string convert( string const & in_data );

  friend class SdictArticleRequest;
};

SdictDictionary::SdictDictionary( string const & id,
                                  string const & indexFile,
                                  vector< string > const & dictionaryFiles ):
  BtreeDictionary( id, dictionaryFiles ),
  idx( indexFile, "rb" ),
  idxHeader( idx.read< IdxHeader >() ),
  chunks( idx, idxHeader.chunksOffset ),
  df( dictionaryFiles[ 0 ], "rb" )
{
  // Read dictionary name

  idx.seek( sizeof( idxHeader ) );
  vector< char > dName( idx.read< uint32_t >() );
  if ( dName.size() > 0 ) {
    idx.read( &dName.front(), dName.size() );
    dictionaryName = string( &dName.front(), dName.size() );
  }

  // Initialize the index

  openIndex( IndexInfo( idxHeader.indexBtreeMaxElements, idxHeader.indexRootOffset ), idx, idxMutex );

  // Full-text search parameters

  ftsIdxName = indexFile + Dictionary::getFtsSuffix();
}

SdictDictionary::~SdictDictionary()
{
  df.close();
}

void SdictDictionary::loadIcon() noexcept
{
  if ( dictionaryIconLoaded )
    return;

  QString fileName = QDir::fromNativeSeparators( getDictionaryFilenames()[ 0 ].c_str() );

  // Remove the extension
  fileName.chop( 3 );

  if ( !loadIconFromFile( fileName ) ) {
    // Load failed -- use default icons
    dictionaryIcon = QIcon( ":/icons/icon32_sdict.png" );
  }

  dictionaryIconLoaded = true;
}

string SdictDictionary::convert( string const & in )
{
  //  GD_DPRINTF( "Source>>>>>>>>>>: %s\n\n\n", in.c_str() );

  string inConverted;

  inConverted.reserve( in.size() );

  bool afterEol = false;

  for ( char i : in ) {
    switch ( i ) {
      case '\n':
        afterEol = true;
        inConverted.append( "<br/>" );
        break;

      case ' ':
        if ( afterEol ) {
          inConverted.append( "&nbsp;" );
          break;
        }
        // Fall-through

      default:
        inConverted.push_back( i );
        afterEol = false;
    }
  }

  QString result = QString::fromUtf8( inConverted.c_str(), inConverted.size() );

  result.replace( QRegularExpression( "<\\s*(p|br)\\s*>", QRegularExpression::CaseInsensitiveOption ), "<br/>" );
  result.remove( QRegularExpression( "<\\s*/p\\s*>", QRegularExpression::CaseInsensitiveOption ) );
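
  // The replacements below map SDictionary's lightweight markup to HTML:
  // <t>...</t> (transcription) and <f>...</f> (word forms) become styled spans,
  // <l>...</l> becomes an unordered list, and <r>...</r> cross-references are
  // turned into bword: links further down.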
  result.replace( QRegularExpression( "<\\s*t\\s*>", QRegularExpression::CaseInsensitiveOption ),
                  R"(<span class="sdict_tr" dir="ltr">)" );
  result.replace( QRegularExpression( "<\\s*f\\s*>", QRegularExpression::CaseInsensitiveOption ),
                  "<span class=\"sdict_forms\">" );
  result.replace( QRegularExpression( "<\\s*/(t|f)\\s*>", QRegularExpression::CaseInsensitiveOption ), "</span>" );

  result.replace( QRegularExpression( "<\\s*l\\s*>", QRegularExpression::CaseInsensitiveOption ), "<ul>" );
  result.replace( QRegularExpression( "<\\s*/l\\s*>", QRegularExpression::CaseInsensitiveOption ), "</ul>" );

  // Links handling
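  // Each <r>word</r> reference is rewritten as
  // <a class="sdict_wordref" href="bword:word">word</a>, so activating the link
  // looks up the referenced headword.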

  int n = 0;
  for ( ;; ) {
    QRegularExpression start_link_tag( "<\\s*r\\s*>", QRegularExpression::CaseInsensitiveOption );
    QRegularExpression end_link_tag( "<\\s*/r\\s*>", QRegularExpression::CaseInsensitiveOption );

    n = result.indexOf( start_link_tag, n );
    if ( n < 0 )
      break;

    int end = result.indexOf( end_link_tag, n );
    if ( end < 0 )
      break;

    QRegularExpressionMatch m = start_link_tag.match( result, 0, QRegularExpression::PartialPreferFirstMatch );
    int tag_len = m.captured().length();
    QString link_text = result.mid( n + tag_len, end - n - tag_len );

    m = end_link_tag.match( result, 0, QRegularExpression::PartialPreferFirstMatch );
    result.replace( end, m.captured().length(), "</a>" );
    result.replace( n, tag_len, QString( R"(<a class="sdict_wordref" href="bword:)" ) + link_text + "\">" );
  }

  // Adjust text direction for lines
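  // Each <br/>-separated line that contains no further markup is compared
  // against the target language's direction; when the line's own direction
  // differs, it is wrapped in a span with an explicit dir attribute.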

  n = 0;
  bool b = true;
  while ( b ) {
    int next = result.indexOf( "<br/>", n );
    if ( next < 0 ) {
      next = result.length();
      b = false;
    }

    if ( !result.mid( n, next - n ).contains( '<' ) ) {
      if ( Html::unescape( result.mid( n, next - n ) ).isRightToLeft() != isToLanguageRTL() ) {
        result.insert( next, "</span>" );
        result.insert( n, QString( "<span dir = \"" ) + ( isToLanguageRTL() ? "ltr" : "rtl" ) + "\">" );
        next = result.indexOf( "<br/>", n );
      }
    }

    n = next + 5;
  }

  return result.toUtf8().data();
}

void SdictDictionary::loadArticle( uint32_t address, string & articleText )
{
  uint32_t articleOffset = address;
  uint32_t articleSize;

  vector< char > articleBody;
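
  // An article record in the .dct file is a 32-bit size followed by that many
  // bytes of body data, compressed according to idxHeader.compressionType.
  // Reads on the shared file handle are serialized with sdictMutex.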
  {
    QMutexLocker _( &sdictMutex );

    df.seek( articleOffset );
    df.read( &articleSize, sizeof( articleSize ) );
    articleBody.resize( articleSize );
    df.read( &articleBody.front(), articleSize );
  }

  if ( articleBody.empty() )
    throw exCantReadFile( getDictionaryFilenames()[ 0 ] );

  if ( idxHeader.compressionType == 1 )
    articleText = decompressZlib( articleBody.data(), articleSize );
  else if ( idxHeader.compressionType == 2 )
    articleText = decompressBzip2( articleBody.data(), articleSize );
  else
    articleText = string( articleBody.data(), articleSize );

  articleText = convert( articleText );

  string div = "<div class=\"sdict\"";
  if ( isToLanguageRTL() )
    div += " dir=\"rtl\"";
  div += ">";

  articleText.insert( 0, div );
  articleText.append( "</div>" );
}

void SdictDictionary::makeFTSIndex( QAtomicInt & isCancelled, bool firstIteration )
{
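  // If the existing full-text index is present and up to date, mark it as
  // completed and bail out; otherwise fall through and (re)build it, unless
  // this is the fast first pass and the dictionary is too large for it.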
  if ( !( Dictionary::needToRebuildIndex( getDictionaryFilenames(), ftsIdxName )
          || FtsHelpers::ftsIndexIsOldOrBad( this ) ) )
    FTS_index_completed.ref();

  if ( haveFTSIndex() )
    return;

  if ( ensureInitDone().size() )
    return;

  if ( firstIteration && getArticleCount() > FTS::MaxDictionarySizeForFastSearch )
    return;

  gdDebug( "SDict: Building the full-text index for dictionary: %s\n", getName().c_str() );

  try {
    FtsHelpers::makeFTSIndex( this, isCancelled );
    FTS_index_completed.ref();
  }
  catch ( std::exception & ex ) {
    gdWarning( "SDict: Failed building full-text search index for \"%s\", reason: %s\n", getName().c_str(), ex.what() );
    QFile::remove( ftsIdxName.c_str() );
  }
}

void SdictDictionary::getArticleText( uint32_t articleAddress, QString & headword, QString & text )
{
  try {
    string articleStr;
    headword.clear();
    text.clear();

    loadArticle( articleAddress, articleStr );

    try {
      text = Html::unescape( QString::fromStdString( articleStr ) );
    }
    catch ( std::exception & ) {
    }
  }
  catch ( std::exception & ex ) {
    gdWarning( "SDict: Failed retrieving article from \"%s\", reason: %s\n", getName().c_str(), ex.what() );
  }
}

sptr< Dictionary::DataRequest >
SdictDictionary::getSearchResults( QString const & searchString, int searchMode, bool matchCase, bool ignoreDiacritics )
{
  return std::make_shared< FtsHelpers::FTSResultsRequest >( *this,
                                                            searchString,
                                                            searchMode,
                                                            matchCase,
                                                            ignoreDiacritics );
}

/// SdictDictionary::getArticle()

class SdictArticleRequest: public Dictionary::DataRequest
{
  wstring word;
  vector< wstring > alts;
  SdictDictionary & dict;
  bool ignoreDiacritics;

  QAtomicInt isCancelled;

  QFuture< void > f;

public:

  SdictArticleRequest( wstring const & word_,
                       vector< wstring > const & alts_,
                       SdictDictionary & dict_,
                       bool ignoreDiacritics_ ):
    word( word_ ),
    alts( alts_ ),
    dict( dict_ ),
    ignoreDiacritics( ignoreDiacritics_ )
  {
    f = QtConcurrent::run( [ this ]() {
      this->run();
    } );
  }

  void run();

  void cancel() override
  {
    isCancelled.ref();
  }

  ~SdictArticleRequest()
  {
    isCancelled.ref();
    f.waitForFinished();
  }
};

void SdictArticleRequest::run()
{
  if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
    finish();
    return;
  }

  vector< WordArticleLink > chain = dict.findArticles( word, ignoreDiacritics );

  for ( const auto & alt : alts ) {
    // Make an additional query for each alt
    vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );

    chain.insert( chain.end(), altChain.begin(), altChain.end() );
  }

  multimap< wstring, pair< string, string > > mainArticles, alternateArticles;

  set< uint32_t > articlesIncluded; // Some synonyms cause the same article to
                                    // appear several times. We combat this
                                    // by only allowing it to appear once.

  wstring wordCaseFolded = Folding::applySimpleCaseOnly( word );
  if ( ignoreDiacritics )
    wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );

  for ( auto & x : chain ) {
    if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
      finish();
      return;
    }

    if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
      continue; // We already have this article in the body.

    // Now grab that article

    string headword, articleText;

    headword = x.word;

    try {
      dict.loadArticle( x.articleOffset, articleText );

      // Ok. Now, does it go to main articles, or to alternate ones? We list
      // main ones first, and alternates after.

      // We do the case-folded comparison here.

      wstring headwordStripped = Folding::applySimpleCaseOnly( headword );
      if ( ignoreDiacritics )
        headwordStripped = Folding::applyDiacriticsOnly( headwordStripped );

      multimap< wstring, pair< string, string > > & mapToUse =
        ( wordCaseFolded == headwordStripped ) ? mainArticles : alternateArticles;

      mapToUse.insert( pair( Folding::applySimpleCaseOnly( headword ), pair( headword, articleText ) ) );

      articlesIncluded.insert( x.articleOffset );
    }
    catch ( std::exception & ex ) {
      gdWarning( "SDict: Failed loading article from \"%s\", reason: %s\n", dict.getName().c_str(), ex.what() );
    }
  }

  if ( mainArticles.empty() && alternateArticles.empty() ) {
    // No such word
    finish();
    return;
  }

  string result;

  multimap< wstring, pair< string, string > >::const_iterator i;

  for ( i = mainArticles.begin(); i != mainArticles.end(); ++i ) {
    result += dict.isFromLanguageRTL() ? "<h3 dir=\"rtl\">" : "<h3>";
    result += i->second.first;
    result += "</h3>";
    result += i->second.second;
  }

  for ( i = alternateArticles.begin(); i != alternateArticles.end(); ++i ) {
    result += dict.isFromLanguageRTL() ? "<h3 dir=\"rtl\">" : "<h3>";
    result += i->second.first;
    result += "</h3>";
    if ( dict.isToLanguageRTL() )
      result += "<span dir=\"rtl\">";
    result += i->second.second;
    if ( dict.isToLanguageRTL() )
      result += "</span>";
  }

  appendString( result );

  hasAnyData = true;

  finish();
}

sptr< Dictionary::DataRequest > SdictDictionary::getArticle( wstring const & word,
                                                             vector< wstring > const & alts,
                                                             wstring const &,
                                                             bool ignoreDiacritics )
{
  return std::make_shared< SdictArticleRequest >( word, alts, *this, ignoreDiacritics );
}

QString const & SdictDictionary::getDescription()
{
  if ( !dictionaryDescription.isEmpty() )
    return dictionaryDescription;

  dictionaryDescription = QObject::tr( "Title: %1%2" ).arg( QString::fromUtf8( getName().c_str() ) ).arg( "\n\n" );

  try {
    QMutexLocker _( &sdictMutex );

    DCT_header dictHeader;

    df.seek( 0 );
    if ( df.readRecords( &dictHeader, sizeof( dictHeader ), 1 ) != 1 )
      throw exCantReadFile( getDictionaryFilenames()[ 0 ] );

    int compression = dictHeader.compression & 0x0F;
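    // Only the low nibble of the compression byte selects the codec
    // (0 - none, 1 - zlib, 2 - bzip2); it applies to the copyright and
    // version blocks read below.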

    vector< char > data;
    uint32_t size;
    string str;

    df.seek( dictHeader.copyrightOffset );
    df.read( &size, sizeof( size ) );
    data.resize( size );
    df.read( &data.front(), size );

    if ( compression == 1 )
      str = decompressZlib( data.data(), size );
    else if ( compression == 2 )
      str = decompressBzip2( data.data(), size );
    else
      str = string( data.data(), size );

    dictionaryDescription +=
      QObject::tr( "Copyright: %1%2" ).arg( QString::fromUtf8( str.c_str(), str.size() ) ).arg( "\n\n" );

    df.seek( dictHeader.versionOffset );
    df.read( &size, sizeof( size ) );
    data.resize( size );
    df.read( &data.front(), size );

    if ( compression == 1 )
      str = decompressZlib( data.data(), size );
    else if ( compression == 2 )
      str = decompressBzip2( data.data(), size );
    else
      str = string( data.data(), size );

    dictionaryDescription +=
      QObject::tr( "Version: %1%2" ).arg( QString::fromUtf8( str.c_str(), str.size() ) ).arg( "\n\n" );
  }
  catch ( std::exception & ex ) {
    gdWarning( "SDict: Failed reading description for \"%s\", reason: %s\n", getName().c_str(), ex.what() );
  }

  if ( dictionaryDescription.isEmpty() )
    dictionaryDescription = "NONE";

  return dictionaryDescription;
}

} // anonymous namespace

vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & fileNames,
                                                      string const & indicesDir,
                                                      Dictionary::Initializing & initializing )
{
  vector< sptr< Dictionary::Class > > dictionaries;

  for ( const auto & fileName : fileNames ) {
    // Skip files with extensions other than .dct to speed up the scanning
    if ( !Utils::endsWithIgnoreCase( fileName, ".dct" ) )
      continue;

    // Got the file -- check if we need to rebuild the index

    vector< string > dictFiles( 1, fileName );

    string dictId = Dictionary::makeDictionaryId( dictFiles );

    string indexFile = indicesDir + dictId;

    if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
      try {
        gdDebug( "SDict: Building the index for dictionary: %s\n", fileName.c_str() );

        File::Class df( fileName, "rb" );

        DCT_header dictHeader;

        df.read( &dictHeader, sizeof( dictHeader ) );
        if ( strncmp( dictHeader.signature, "sdct", 4 ) ) {
          gdWarning( "File \"%s\" is not a valid Sdictionary file\n", fileName.c_str() );
          continue;
        }
        int compression = dictHeader.compression & 0x0F;

        vector< char > data;
        uint32_t size;

        df.seek( dictHeader.titleOffset );
        df.read( &size, sizeof( size ) );
        data.resize( size );
        df.read( &data.front(), size );

        string dictName;

        if ( compression == 1 )
          dictName = decompressZlib( data.data(), size );
        else if ( compression == 2 )
          dictName = decompressBzip2( data.data(), size );
        else
          dictName = string( data.data(), size );

        initializing.indexingDictionary( dictName );

        File::Class idx( indexFile, "wb" );
        IdxHeader idxHeader;
        memset( &idxHeader, 0, sizeof( idxHeader ) );

        // We write a dummy header first. At the end of the process the header
        // will be rewritten with the right values.

        idx.write( idxHeader );

        idx.write( (uint32_t)dictName.size() );
        idx.write( dictName.data(), dictName.size() );

        IndexedWords indexedWords;

        ChunkedStorage::Writer chunks( idx );

        uint32_t wordCount = 0;
        set< uint32_t > articleOffsets;
        uint32_t pos = dictHeader.fullIndexOffset;
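
        // Walk the chained full index: each IndexElement is immediately followed
        // by its UTF-8 headword, and el.nextWord gives the distance to the next
        // record, so a value smaller than sizeof( el ) marks the end of usable data.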
        for ( uint32_t j = 0; j < dictHeader.wordCount; j++ ) {
          IndexElement el;
          df.seek( pos );
          df.read( &el, sizeof( el ) );
          uint32_t articleOffset = dictHeader.articlesOffset + el.articleOffset;
          if ( el.nextWord < sizeof( el ) )
            break;
          size = el.nextWord - sizeof( el );
          wordCount++;
          data.resize( size );
          df.read( &data.front(), size );

          if ( articleOffsets.find( articleOffset ) == articleOffsets.end() )
            articleOffsets.insert( articleOffset );

          // Insert new entry

          indexedWords.addWord( Utf8::decode( string( data.data(), size ) ), articleOffset );

          pos += el.nextWord;
        }
        // Finish with the chunks

        idxHeader.chunksOffset = chunks.finish();

        // Build index

        IndexInfo idxInfo = BtreeIndexing::buildIndex( indexedWords, idx );

        idxHeader.indexBtreeMaxElements = idxInfo.btreeMaxElements;
        idxHeader.indexRootOffset = idxInfo.rootOffset;

        indexedWords.clear(); // Release memory -- no need for this data

        // That concludes it. Update the header.

        idxHeader.signature = Signature;
        idxHeader.formatVersion = CurrentFormatVersion;

        idxHeader.articleCount = articleOffsets.size();
        idxHeader.wordCount = wordCount;

        idxHeader.langFrom = LangCoder::code2toInt( dictHeader.inputLang );
        idxHeader.langTo = LangCoder::code2toInt( dictHeader.outputLang );
        idxHeader.compressionType = compression;

        idx.rewind();

        idx.write( &idxHeader, sizeof( idxHeader ) );
      }
      catch ( std::exception & e ) {
        gdWarning( "Sdictionary dictionary indexing failed: %s, error: %s\n", fileName.c_str(), e.what() );
        continue;
      }
      catch ( ... ) {
        qWarning( "Sdictionary dictionary indexing failed\n" );
        continue;
      }
    } // if need to rebuild

    try {
      dictionaries.push_back( std::make_shared< SdictDictionary >( dictId, indexFile, dictFiles ) );
    }
    catch ( std::exception & e ) {
      gdWarning( "Sdictionary dictionary initializing failed: %s, error: %s\n", fileName.c_str(), e.what() );
    }
  }
  return dictionaries;
}

} // namespace Sdict