Mirror of https://github.com/xiaoyifang/goldendict-ng.git, synced 2024-11-27 23:34:06 +00:00
Merge pull request #93 from xiaoyifang/feature/0606string-convert-replace

opt: replace string() with QString::toStdString()
Commit: ccd8ace0ec
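For context, and not part of the patch itself: QString::toStdString() returns the UTF-8 representation of the string (it is defined in terms of toUtf8()), so it yields the same bytes as the older string( qstr.toUtf8().constData() ) idiom that this commit removes, while skipping the explicit QByteArray / const char * round trip. Below is a minimal standalone sketch of the two idioms under that assumption; the helpers oldConvert and newConvert are illustrative only and do not exist in goldendict-ng.

#include <QObject>
#include <QString>
#include <cassert>
#include <string>

// Illustrative helpers only, not part of goldendict-ng.
static std::string oldConvert( QString const & s )
{
  // Old idiom: QString -> QByteArray (UTF-8) -> const char * -> std::string.
  // Constructing from constData() alone stops at an embedded '\0'.
  return std::string( s.toUtf8().constData() );
}

static std::string newConvert( QString const & s )
{
  // New idiom used by this commit: also UTF-8, built from the full data and size.
  return s.toStdString();
}

int main()
{
  QString msg = QObject::tr( "Article loading error" );
  assert( oldConvert( msg ) == newConvert( msg ) );  // identical for ordinary text
  return 0;
}

The only observable difference is that toStdString() preserves embedded null characters, whereas the old form truncated at the first one; for the article text, titles, encodings, and file names touched in the hunks below that should not matter.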
aard.cc (4 changed lines)

@@ -429,7 +429,7 @@ void AardDictionary::loadArticle( quint32 address,
   while( 1 )
   {
-    articleText = string( QObject::tr( "Article loading error" ).toUtf8().constData() );
+    articleText = QObject::tr( "Article loading error" ).toStdString();
     try
     {
       Mutex::Lock _( aardMutex );

@@ -521,7 +521,7 @@ void AardDictionary::loadArticle( quint32 address,
       articleText = convert( articleText );
     }
     else
-      articleText = string( QObject::tr( "Article decoding error" ).toUtf8().constData() );
+      articleText = QObject::tr( "Article decoding error" ).toStdString();
 
     // See Issue #271: A mechanism to clean-up invalid HTML cards.
     string cleaner = "</font>""</font>""</font>""</font>""</font>""</font>"
dsl.cc (2 changed lines)

@@ -1697,7 +1697,7 @@ void DslArticleRequest::run()
   {
     gdWarning( "DSL: Failed loading article from \"%s\", reason: %s\n", dict.getName().c_str(), ex.what() );
     articleText = string( "<span class=\"dsl_article\">" )
-                  + string( QObject::tr( "Article loading error" ).toUtf8().constData() )
+                  + QObject::tr( "Article loading error" ).toStdString()
                   + "</span>";
   }
 
(additional file, name not shown in the diff listing)

@@ -173,7 +173,7 @@ public:
   {
     QString s = QString::fromUtf8( article.c_str() );
     substituteStylesheet( s, styleSheets );
-    return string( s.toUtf8().constData() );
+    return s.toStdString();
   }
 
 protected:
mdx.cc (14 changed lines)

@@ -977,7 +977,7 @@ void MdxDictionary::loadArticle( uint32_t offset, string & articleText, bool noF
   if( !noFilter )
     article = filterResource( articleId, article );
 
-  articleText = string( article.toUtf8().constData() );
+  articleText = article.toStdString();
 }
 
 QString & MdxDictionary::filterResource( QString const & articleId, QString & article )

@@ -1409,7 +1409,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
     if ( !parser.open( i->c_str() ) )
       continue;
 
-    string title = string( parser.title().toUtf8().constData() );
+    string title = parser.title().toStdString();
     initializing.indexingDictionary( title );
 
     for ( vector< string >::const_iterator mddIter = dictFiles.begin() + 1;

@@ -1440,7 +1440,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
 
     // then the encoding
     {
-      string encoding = string( parser.encoding().toUtf8().constData() );
+      string encoding = parser.encoding().toStdString();
       idx.write< uint32_t >( encoding.size() );
       idx.write( encoding.data(), encoding.size() );
     }

@@ -1457,7 +1457,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
 
     // Save dictionary description if there's one
    {
-      string description = string( parser.description().toUtf8().constData() );
+      string description = parser.description().toStdString();
       idxHeader.descriptionAddress = chunks.startNewBlock();
       chunks.addToBlock( description.c_str(), description.size() + 1 );
       idxHeader.descriptionSize = description.size() + 1;

@@ -1491,7 +1491,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
       mddIndices.push_back( mddIndexedWords );
       // Save filename for .mdd files only
       QFileInfo fi( mddParser->filename() );
-      mddFileNames.push_back( string( fi.fileName().toUtf8().constData() ) );
+      mddFileNames.push_back( fi.fileName().toStdString() );
       mddParsers.pop_front();
     }
 

@@ -1514,8 +1514,8 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
     for ( MdictParser::StyleSheets::const_iterator iter = styleSheets.begin();
           iter != styleSheets.end(); ++iter )
     {
-      string styleBegin( iter->second.first.toUtf8().constData() );
-      string styleEnd( iter->second.second.toUtf8().constData() );
+      string styleBegin(iter->second.first.toStdString());
+      string styleEnd( iter->second.second.toStdString() );
 
       // key
       idx.write<qint32>( iter->first );
slob.cc (6 changed lines)

@@ -694,12 +694,12 @@ SlobDictionary::SlobDictionary( string const & id,
 
   // Read dictionary name
 
-  dictionaryName = string( sf.getDictionaryName().toUtf8().constData() );
+  dictionaryName = sf.getDictionaryName().toStdString();
   if( dictionaryName.empty() )
   {
     QString name = QDir::fromNativeSeparators( FsEncoding::decode( dictionaryFiles[ 0 ].c_str() ) );
     int n = name.lastIndexOf( '/' );
-    dictionaryName = string( name.mid( n + 1 ).toUtf8().constData() );
+    dictionaryName = name.mid( n + 1 ).toStdString();
   }
 
   // Full-text search parameters

@@ -799,7 +799,7 @@ void SlobDictionary::loadArticle( quint32 address,
     articleText = convert( articleText, entry );
   }
   else
-    articleText = string( QObject::tr( "Article decoding error" ).toUtf8().constData() );
+    articleText = QObject::tr( "Article decoding error" ).toStdString();
 
   // See Issue #271: A mechanism to clean-up invalid HTML cards.
   string cleaner = "</font>""</font>""</font>""</font>""</font>""</font>"
zim.cc (2 changed lines)

@@ -769,7 +769,7 @@ ZimDictionary::ZimDictionary( string const & id,
   {
     QString name = QDir::fromNativeSeparators( FsEncoding::decode( dictionaryFiles[ 0 ].c_str() ) );
     int n = name.lastIndexOf( '/' );
-    dictionaryName = string( name.mid( n + 1 ).toUtf8().constData() );
+    dictionaryName = name.mid( n + 1 ).toStdString();
   }
   else
   {