fix: merge conflict

YiFang Xiao 2023-07-31 09:48:15 +08:00
commit 28ee5248de
127 changed files with 1959 additions and 44746 deletions

View file

@ -40,7 +40,7 @@ jobs:
uses: mathieudutour/github-tag-action@v6.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
custom_tag: ${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
custom_tag: ${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
- name: Bump version and push tag

View file

@ -15,27 +15,19 @@ on:
permissions:
contents: read
jobs:
format:
clang-format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3.5.2
with:
# check out HEAD on the branch
# ref: ${{ github.head_ref }}
# repository: ${{github.event.pull_request.head.repo.full_name}}
# make sure the parent commit is grabbed as well, because
# that's what will get formatted (i.e. the most recent commit)
fetch-depth: 2
# format the latest commit
- name: ubuntu install clang-format
# if: ${{ github.event.pull_request.head.repo.full_name == 'xiaoyifang/goldendict-ng' }}
id: clang-format
run: |
sudo apt-get install git lsb-release wget software-properties-common
wget -qO - https://apt.llvm.org/llvm.sh| sudo bash
sudo apt-get update
sudo apt-get install -y clang-format
git-clang-format --style=file HEAD^
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
brew install clang-format
(/home/linuxbrew/.linuxbrew/opt/clang-format/bin/git-clang-format --binary=/home/linuxbrew/.linuxbrew/opt/clang-format/bin/clang-format --style=file HEAD^) || true
- uses: autofix-ci/action@89762f9c25dd85f6b78cd40e521232e403357ec0

View file

@ -22,7 +22,7 @@ jobs:
strategy:
matrix:
os: [macos-13]
qt_ver: [ 6.4.3,6.5.1 ]
qt_ver: [ 6.4.3,6.5.2 ]
qt_arch: [clang_64]
env:
targetName: GoldenDict
@ -203,7 +203,7 @@ jobs:
file: ${{ env.targetName }}.dmg
asset_name: ${{ matrix.qt_ver }}-${{ env.targetName }}_${{ matrix.os }}_homebrew_breakpad_${{steps.vars.outputs.release_date}}.dmg
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}

View file

@ -202,7 +202,7 @@ jobs:
file: ${{ env.targetName }}.dmg
asset_name: ${{ matrix.qt_ver }}-${{ env.targetName }}_${{ matrix.os }}_homebrew_${{steps.vars.outputs.release_date}}.dmg
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}

View file

@ -26,7 +26,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-20.04]
qt_ver: [ 6.5.1 ]
qt_ver: [ 6.5.2 ]
qt_arch: [gcc_64]
env:
version: 23.07.23
@ -44,20 +44,27 @@ jobs:
- name: ubuntu install thirdparty dependencies
run: |
sudo apt-get install git pkg-config build-essential
sudo apt-get install libvorbis-dev zlib1g-dev libhunspell-dev x11proto-record-dev
sudo apt-get install libxtst-dev liblzo2-dev libbz2-dev
sudo apt-get install libavutil-dev libavformat-dev libeb16-dev
sudo apt-get install doxygen libzstd-dev libxkbcommon-dev libgstreamer-plugins-base1.0-0 libgstreamer-gl1.0-0
sudo apt-get install libxkbcommon-x11-dev libspeechd-dev
sudo apt install libfuse2
sudo add-apt-repository --yes --update ppa:kiwixteam/release
sudo apt-get install build-essential \
libvorbis-dev zlib1g-dev libhunspell-dev x11proto-record-dev \
libxtst-dev liblzo2-dev libbz2-dev \
libavutil-dev libavformat-dev libeb16-dev \
libxkbcommon-dev libgstreamer-plugins-base1.0-0 libgstreamer-gl1.0-0 \
libxkbcommon-x11-dev libspeechd-dev \
libfuse2 libxapian-dev libzim-dev ninja-build
pip3 install cmake
sudo ln -sf /usr/bin/x86_64-linux-gnu-ld.gold /usr/bin/ld
#build opencc
# build opencc
git clone https://github.com/BYVoid/OpenCC
cd OpenCC/
make PREFIX=/usr -j$(nproc)
sudo make install
cmake -S . -B build_dir -G Ninja\
-DCMAKE_INSTALL_PREFIX=/usr/ \
-DCMAKE_BUILD_TYPE=Release
cmake --build build_dir
sudo cmake --install ./build_dir/
cd ..
# wget https://oligarchy.co.uk/xapian/1.4.22/xapian-core-1.4.22.tar.xz
@ -67,33 +74,21 @@ jobs:
# make PREFIX=/usr
# sudo make install
# cd ..
sudo apt install libxapian-dev
sudo add-apt-repository --yes --update ppa:kiwixteam/release
sudo apt install libzim-dev
- uses: actions/checkout@v3
with:
fetch-depth: 0
submodules: true
- name: version-file
shell: bash
env:
VAR_SUFFIX: ${{env.version-suffix}}
VAR_VERSION: ${{env.version}}
run: |
current_tag=$(git rev-parse --short=8 HEAD)
release_date=$(date +'%Y%m%d')
echo "$VAR_VERSION-$VAR_SUFFIX.$release_date.$current_tag">version.txt
cat version.txt
echo "$version"
- name: build goldendict
run: |
qmake CONFIG+=release CONFIG+=use_xapian PREFIX=/usr CONFIG+=zim_support CONFIG+=chinese_conversion_support CONFIG+=use_iconv
make INSTALL_ROOT=appdir -j`nproc` install; find appdir/
cmake -S . -B build_dir -G Ninja \
-DCMAKE_BUILD_TYPE=Release
cmake --build build_dir
cmake --install ./build_dir --prefix=appdir/usr
find appdir/
ls -al appdir
- name: Build AppImage
run: |
# for /usr/lib/qt6/plugins/platforminputcontexts/libfcitx5platforminputcontextplugin.so
@ -109,7 +104,7 @@ jobs:
chmod a+x linuxdeploy-plugin-qt-x86_64.AppImage
wget -c -nv "https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
chmod a+x linuxdeploy-x86_64.AppImage
./linuxdeploy-x86_64.AppImage --appdir appdir --output appimage --plugin qt -i redist/icons/goldendict.png -d redist/org.xiaoyifang.GoldenDict_NG.desktop
./linuxdeploy-x86_64.AppImage --appdir appdir --output appimage --plugin qt -i redist/icons/goldendict.png -d redist/io.github.xiaoyifang.goldendict_ng.desktop
- name: changelog
id: changelog
@ -178,7 +173,7 @@ jobs:
file: ${{ steps.vars.outputs.appname }}
asset_name: ${{ matrix.qt_ver }}-${{ steps.vars.outputs.appname }}
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}

View file

@ -26,7 +26,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
qt_ver: [5.15.2,6.5.1]
qt_ver: [5.15.2,6.5.2]
qt_arch: [gcc_64]
steps:

View file

@ -93,7 +93,7 @@ jobs:
chmod a+x linuxdeploy-plugin-qt-x86_64.AppImage
wget -c -nv "https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
chmod a+x linuxdeploy-x86_64.AppImage
./linuxdeploy-x86_64.AppImage --appdir appdir --output appimage --plugin qt -i redist/icons/goldendict.png -d redist/org.xiaoyifang.GoldenDict_NG.desktop
./linuxdeploy-x86_64.AppImage --appdir appdir --output appimage --plugin qt -i redist/icons/goldendict.png -d redist/io.github.xiaoyifang.goldendict_ng.desktop
- name: changelog
id: changelog
@ -160,7 +160,7 @@ jobs:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ${{ steps.vars.outputs.appname }}
asset_name: ${{ matrix.qt_ver }}-${{ steps.vars.outputs.appname }}
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}

View file

@ -27,7 +27,7 @@ jobs:
strategy:
matrix:
os: [windows-2019]
qt_ver: [6.5.1]
qt_ver: [6.5.2]
qt_arch: [win64_msvc2019_64]
env:
targetName: GoldenDict.exe
@ -180,7 +180,7 @@ jobs:
file: release/${{ env.targetName }}
asset_name: ${{ matrix.qt_ver }}.${{ matrix.os }}-${{ env.targetName }}
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}
@ -192,7 +192,7 @@ jobs:
file: release/GoldenDict.pdb
asset_name: ${{ matrix.qt_ver }}.${{ matrix.os }}-GoldenDict.pdb
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}
@ -217,7 +217,7 @@ jobs:
file: ${{ steps.package.outputs.packageName }}/GoldenDict-ng-v23-Installer.exe
asset_name: ${{ matrix.qt_ver }}-GoldenDict-ng-Installer.exe
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}
@ -230,7 +230,7 @@ jobs:
file: ${{ steps.package.outputs.packageName }}.zip
asset_name: ${{ matrix.qt_ver }}-${{ env.targetName }}_${{ matrix.os }}_${{steps.vars.outputs.release_date}}.zip
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}

View file

@ -27,7 +27,7 @@ jobs:
strategy:
matrix:
os: [windows-2019]
qt_ver: [5.15.2,6.5.1]
qt_ver: [5.15.2,6.5.2]
qt_arch: [win64_msvc2019_64]
steps:
- uses: actions/setup-python@v3

View file

@ -165,7 +165,7 @@ jobs:
file: release/${{ env.targetName }}
asset_name: ${{ matrix.qt_ver }}-${{ env.targetName }}
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}
@ -194,7 +194,7 @@ jobs:
file: ${{ steps.package.outputs.packageName }}/GoldenDict-ng-v23-Installer.exe
asset_name: ${{ matrix.qt_ver }}-GoldenDict-ng-Installer.exe
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}
@ -205,7 +205,7 @@ jobs:
file: ${{ steps.package.outputs.packageName }}.zip
asset_name: ${{ matrix.qt_ver }}-${{ env.targetName }}_${{ matrix.os }}_${{steps.vars.outputs.release_date}}.zip
tag: tag-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
tag: v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.sha_short }}
overwrite: true
release_name: GoldenDict-ng-v${{env.version}}-${{env.version-suffix}}.${{ steps.vars.outputs.release_hm }}.${{ steps.vars.outputs.sha_short }}
prerelease: ${{env.prerelease}}

View file

@ -42,16 +42,19 @@ find_package(Qt6 REQUIRED COMPONENTS
#### Compile time files and preprocessor flags
# Obtain git commit hash
execute_process(
COMMAND git rev-parse --short=8 HEAD
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
OUTPUT_VARIABLE GIT_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
string(TIMESTAMP date_for_version_file) # note: this variable is cached for every run, but for user installation, this doesn't matter much
configure_file(version.txt.in ${CMAKE_SOURCE_DIR}/version.txt)
block() # generate version.txt
string(TIMESTAMP build_time)
find_package(Git)
if (EXISTS "${CMAKE_SOURCE_DIR}/.git" AND GIT_FOUND)
execute_process(
COMMAND ${GIT_EXECUTABLE} -C "${CMAKE_SOURCE_DIR}" rev-parse --short HEAD
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE GIT_HASH)
file(WRITE "${CMAKE_SOURCE_DIR}/version.txt" "${PROJECT_VERSION}.${GIT_HASH} at ${build_time}")
else () # not built in a git repo
file(WRITE "${CMAKE_SOURCE_DIR}/version.txt" "${PROJECT_VERSION} at ${build_time}")
endif ()
endblock()
#### Sources Files
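For context, a minimal sketch of reading the version.txt that the block above writes at configure time; the diff does not show where the application consumes this file, so the Qt-based reader below, its function name, and the source-directory argument are purely illustrative.

#include <QFile>
#include <QString>
#include <QTextStream>

// Illustrative only: expected content per the CMake block above is
// "<PROJECT_VERSION>.<GIT_HASH> at <build_time>" inside a git checkout,
// or "<PROJECT_VERSION> at <build_time>" otherwise.
QString readGeneratedVersion( QString const & sourceDir )
{
  QFile f( sourceDir + "/version.txt" );
  if ( !f.open( QIODevice::ReadOnly | QIODevice::Text ) )
    return QStringLiteral( "unknown" );
  return QTextStream( &f ).readAll().trimmed();
}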
@ -191,8 +194,8 @@ if (APPLE)
endif ()
if (LINUX OR BSD)
install(FILES ${CMAKE_SOURCE_DIR}/redist/org.xiaoyifang.GoldenDict_NG.desktop DESTINATION share/applications)
install(FILES ${CMAKE_SOURCE_DIR}/redist/org.xiaoyifang.GoldenDict_NG.metainfo.xml DESTINATION share/metainfo)
install(FILES ${CMAKE_SOURCE_DIR}/redist/io.github.xiaoyifang.goldendict_ng.desktop DESTINATION share/applications)
install(FILES ${CMAKE_SOURCE_DIR}/redist/io.github.xiaoyifang.goldendict_ng.metainfo.xml DESTINATION share/metainfo)
install(FILES ${CMAKE_SOURCE_DIR}/redist/icons/goldendict.png DESTINATION share/pixmaps)

View file

@ -22,6 +22,7 @@ set(THIRD_PARTY_LIBARY
debug ${CMAKE_SOURCE_DIR}/winlibs/lib/dbg/hunspell-1.7.lib optimized ${CMAKE_SOURCE_DIR}/winlibs/lib/hunspell-1.7.lib
debug ${CMAKE_SOURCE_DIR}/winlibs/lib/dbg/zim.lib optimized ${CMAKE_SOURCE_DIR}/winlibs/lib/zim.lib
debug ${CMAKE_SOURCE_DIR}/winlibs/lib/dbg/opencc.lib optimized ${CMAKE_SOURCE_DIR}/winlibs/lib/opencc.lib
debug ${CMAKE_SOURCE_DIR}/winlibs/lib/dbg/zlibd.lib optimized ${CMAKE_SOURCE_DIR}/winlibs/lib/zlib.lib
)
target_link_libraries(${GOLDENDICT} PRIVATE ${THIRD_PARTY_LIBARY})

View file

@ -77,6 +77,7 @@ win32{
CONFIG( use_breakpad ) {
DEFINES += USE_BREAKPAD
#the lib ,include files are copied from vcpkg install package.
LIBS += -L$$PWD/thirdparty/breakpad/lib/ -llibbreakpad -llibbreakpad_client
@ -119,8 +120,8 @@ LIBS += -lbz2 \
-llzo2
win32{
Debug: LIBS+= -lzlibd
Release: LIBS+= -lzlib
Debug: LIBS+= -L$$PWD/winlibs/lib/dbg/ -lzlibd
Release: LIBS+= -L$$PWD/winlibs/lib/ -lzlib
}else{
LIBS += -lz
}

icons/fts_disabled.svg (new file, 63 lines, 2.2 KiB)
View file

@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="6.3500009mm"
height="6.3484006mm"
viewBox="0 0 6.3500009 6.3484008"
version="1.1"
id="svg384"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"
sodipodi:docname="fts_disabled.svg">
<metadata
id="metadata827">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
id="namedview386"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="false"
inkscape:zoom="0.71980919"
inkscape:cx="278.04436"
inkscape:cy="-81.638345"
inkscape:window-width="1920"
inkscape:window-height="1017"
inkscape:window-x="-8"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="layer1" />
<defs
id="defs381" />
<g
inkscape:label="图层 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-77.276611,-133.65252)">
<path
id="path505"
style="fill:#ff5555;stroke:#000000;stroke-width:0.01366357"
d="m 82.917815,133.66249 -2.512215,2.25622 -1.94873,-2.14187 -0.722718,0.68025 c 1e-5,7e-5 1.5e-5,1.6e-4 2.6e-5,2.7e-4 8.47e-4,0.006 0.0013,0.0115 0.0019,0.0173 2.91e-4,0.003 5.56e-4,0.006 8.47e-4,0.008 l 1.942459,2.08806 -1.770682,1.59025 -0.623782,0.93407 0.396943,0.55282 2.70347,-2.31826 2.475782,2.66138 0.642703,-0.6672 -2.385907,-2.62239 2.4993,-2.14321 z"
inkscape:connector-curvature="0" />
</g>
</svg>


icons/fts_enabled.svg (new file, 63 lines, 2 KiB)
View file

@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="6.349782mm"
height="6.3508086mm"
viewBox="0 0 6.3497821 6.3508086"
version="1.1"
id="svg5"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"
sodipodi:docname="fts_enabled.svg">
<metadata
id="metadata841">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
id="namedview7"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="false"
inkscape:zoom="5.7584736"
inkscape:cx="14.46914"
inkscape:cy="-5.2492094"
inkscape:window-width="1920"
inkscape:window-height="1017"
inkscape:window-x="-8"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="layer1" />
<defs
id="defs2" />
<g
inkscape:label="图层 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-95.083595,-117.75641)">
<path
id="path309"
style="fill:#008000;stroke:#000000;stroke-width:0.0159395"
d="m 100.9701,117.76957 -3.584216,4.90523 -1.792095,-1.99198 -0.501764,0.76135 c 0.0017,0.009 0.0029,0.0187 0.0038,0.0289 l 2.053789,2.62099 4.273476,-5.77669 -0.15756,-0.1743 z"
inkscape:connector-curvature="0" />
</g>
</svg>


41 file diffs suppressed because they are too large to display.

View file

@ -2,7 +2,7 @@
Type=Application
Terminal=false
Categories=Office;Dictionary;Education;Qt;
Name=GoldenDict-NG
Name=GoldenDict-ng
GenericName=Multiformat Dictionary
GenericName[zh_CN]=
Comment=A feature-rich dictionary lookup program

View file

@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Copyright 2019 Vitaly Zaitsev <vitaly@easycoding.org> -->
<component type="desktop">
<id>io.github.xiaoyifang.goldendict_ng</id>
<metadata_license>CC0-1.0</metadata_license>
<project_license>GPL-3.0-or-later</project_license>
<name>GoldenDict-ng</name>
<summary>Advanced dictionary lookup program</summary>
<categories>
<category>Education</category>
<category>Dictionary</category>
<category>Languages</category>
</categories>
<description>
<p>
GoldenDict-ng is a feature-rich dictionary lookup program, supporting multiple
dictionary formats, featuring perfect article rendering with the complete
markup, illustrations and other content retained, and allowing you to type
in words without any accents or correct case.
</p>
</description>
<screenshots>
<screenshot type="default">
<image>https://user-images.githubusercontent.com/20123683/255323975-4f12d2aa-c8c9-4dc6-b3d4-cfbc46d70889.png</image>
</screenshot>
<screenshot>
<image>https://user-images.githubusercontent.com/20123683/255323667-e76a55bc-4c81-487c-9f2b-14ece7e25f7c.png</image>
</screenshot>
<screenshot>
<image>https://user-images.githubusercontent.com/20123683/239691144-5145f844-ed88-4a24-ac86-4904ede13a32.png</image>
</screenshot>
</screenshots>
<url type="homepage">https://xiaoyifang.github.io/goldendict-ng/</url>
<url type="help">https://xiaoyifang.github.io/goldendict-ng/</url>
<url type="bugtracker">https://github.com/xiaoyifang/goldendict-ng/issues</url>
<url type="contact">https://github.com/xiaoyifang/goldendict-ng/discussions</url>
<url type="vcs-browser">https://github.com/xiaoyifang/goldendict-ng</url>
<update_contact>https://github.com/xiaoyifang/goldendict-ng</update_contact>
<launchable type="desktop-id">io.github.xiaoyifang.goldendict_ng.desktop</launchable>
<content_rating type="oars-1.0"/>
<provides>
<id>org.goldendict_ng.desktop</id>
</provides>
<releases>
<release version="23.07.23" date="2023-07-23"/>
<release version="23.06.01" date="2023-06-01"/>
</releases>
</component>

View file

@ -1,35 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Copyright 2019 Vitaly Zaitsev <vitaly@easycoding.org> -->
<component type="desktop">
<id>org.xiaoyifang.GoldenDict_NG</id>
<metadata_license>CC0-1.0</metadata_license>
<project_license>GPL-3.0-or-later</project_license>
<name>GoldenDict-NG</name>
<summary>Dictionary lookup program</summary>
<description>
<p>
GoldenDict-NG is a feature-rich dictionary lookup program, supporting multiple
dictionary formats, featuring perfect article rendering with the complete
markup, illustrations and other content retained, and allowing you to type
in words without any accents or correct case.
</p>
</description>
<screenshots>
<screenshot type="default">
<image>https://xiaoyifang.github.io/goldendict-ng/img/linux_genshin.webp</image>
</screenshot>
<screenshot>
<image>https://xiaoyifang.github.io/goldendict-ng/img/mac_black.webp</image>
</screenshot>
<screenshot>
<image>https://xiaoyifang.github.io/goldendict-ng/img/windows_white.webp</image>
</screenshot>
</screenshots>
<url type="homepage">https://github.com/xiaoyifang/goldendict-ng</url>
<update_contact>https://github.com/xiaoyifang/goldendict-ng</update_contact>
<launchable type="desktop-id">org.xiaoyifang.GoldenDict_NG.desktop</launchable>
<content_rating type="oars-1.0" />
<provides>
<id>org.goldendict_ng.desktop</id>
</provides>
</component>

View file

@ -92,5 +92,7 @@
<file>icons/old-downarrow.png</file>
<file>icons/custom_trans.svg</file>
<file>icons/splash.png</file>
<file>icons/fts_disabled.svg</file>
<file>icons/fts_enabled.svg</file>
</qresource>
</RCC>

View file

@ -445,7 +445,7 @@ qint64 ArticleResourceReply::readData( char * out, qint64 maxSize )
}
qint64 toRead = maxSize < left ? maxSize : left;
GD_DPRINTF( "====reading %d of (%d) bytes . Finished: %d", (int)toRead, avail, finished );
GD_DPRINTF( "====reading %d of (%lld) bytes . Finished: %d", (int)toRead, avail, finished );
try {
req->getDataSlice( alreadyRead, toRead, out );

View file

@ -6,13 +6,17 @@
std::string addAudioLink( std::string const & url, std::string const & dictionaryId )
{
if ( url.empty() || url.length() < 2 )
return {};
GlobalBroadcaster::instance()->pronounce_engine.sendAudio(
dictionaryId,
QString::fromStdString( url.substr( 1, url.length() - 2 ) ) );
return addAudioLink( QString::fromStdString( url ), dictionaryId );
}
return std::string( "<script type=\"text/javascript\">" + makeAudioLinkScript( url, dictionaryId ) + "</script>" );
std::string addAudioLink( QString const & url, std::string const & dictionaryId )
{
if ( url.isEmpty() || url.length() < 2 )
return {};
GlobalBroadcaster::instance()->pronounce_engine.sendAudio( dictionaryId, url.mid( 1, url.length() - 2 ) );
return std::string( "<script type=\"text/javascript\">" + makeAudioLinkScript( url.toStdString(), dictionaryId )
+ "</script>" );
}
std::string makeAudioLinkScript( std::string const & url, std::string const & dictionaryId )

View file

@ -5,6 +5,7 @@
#define __AUDIOLINK_HH_INCLUDED__
#include <QString>
#include <string>
/// Adds a piece of javascript to save the given audiolink to a special
@ -13,7 +14,7 @@
/// The url should be escaped and surrounded by quotes.
/// The dictionary id is used to make active dictionary feature work.
std::string addAudioLink( std::string const & url, std::string const & dictionaryId );
std::string addAudioLink( QString const & url, std::string const & dictionaryId );
std::string makeAudioLinkScript( std::string const & url, std::string const & dictionaryId );
#endif
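A minimal usage sketch of the new QString overload declared above, following the header's contract that the URL is already escaped and wrapped in quotes; the gdau:// URL and the dictionary id below are placeholders invented for illustration.

#include "audiolink.hh"

#include <QString>
#include <string>

std::string buildPronunciationScript()
{
  // Quoted, escaped URL as the comment above requires; both values are made up.
  QString quotedUrl = QStringLiteral( "\"gdau://0123456789abcdef/pronounce.ogg\"" );
  // The overload strips the surrounding quotes, hands the bare URL to the
  // pronounce engine for this dictionary, and returns the <script> snippet.
  return addAudioLink( quotedUrl, "0123456789abcdef" );
}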

View file

@ -58,7 +58,7 @@ public:
AudioOutput * audioOutput = nullptr;
QByteArray buffer;
qint64 offset = 0;
bool quit = 0;
bool quit = false;
QMutex mutex;
QWaitCondition cond;
QThreadPool threadPool;
@ -92,7 +92,6 @@ public:
memcpy( &data[ bytesWritten ], sampleData, toWrite );
buffer.remove( 0, toWrite );
bytesWritten += toWrite;
// data += toWrite;
len -= toWrite;
}
@ -130,9 +129,10 @@ public:
QObject::connect( audioOutput, &AudioOutput::stateChanged, audioOutput, [ & ]( QAudio::State state ) {
switch ( state ) {
case QAudio::StoppedState:
quit = true;
if ( audioOutput->error() != QAudio::NoError ) {
qWarning() << "QAudioOutput stopped:" << audioOutput->error();
quit = true;
}
break;
default:
@ -167,6 +167,14 @@ public:
}
};
void AudioOutput::stop()
{
Q_D( AudioOutput );
d->quit = true;
d->cond.wakeAll();
d->audioPlayFuture.waitForFinished();
}
AudioOutput::AudioOutput( QObject * parent ):
QObject( parent ),
d_ptr( new AudioOutputPrivate )

View file

@ -13,6 +13,7 @@ public:
bool play( const uint8_t * data, qint64 len );
void setAudioFormat( int sampleRate, int channels );
void stop();
protected:
QScopedPointer< AudioOutputPrivate > d_ptr;
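A short sketch of driving this player through the interface above, including the stop() method added in this commit; the sample rate, channel count and PCM buffer are assumed values, and the AudioOutput declaration is taken to be in scope.

#include <QByteArray>
#include <cstdint>

void playPcmOnce( AudioOutput & out, QByteArray const & pcm )
{
  out.setAudioFormat( 44100, 2 ); // 44.1 kHz stereo, assumed format
  out.play( reinterpret_cast< const uint8_t * >( pcm.constData() ), pcm.size() );
  // New in this commit: stop() raises the quit flag, wakes the feeding loop
  // and waits for the playback future to finish.
  out.stop();
}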

View file

@ -252,12 +252,24 @@ inline std::pair< bool, QString > getQueryWord( QUrl const & url )
inline bool isAudioUrl( QUrl const & url )
{
if ( !url.isValid() )
return false;
// Note: we check for forvo sound links explicitly, as they don't have extensions
return ( url.scheme() == "http" || url.scheme() == "https" || url.scheme() == "gdau" )
&& ( Filetype::isNameOfSound( url.path().toUtf8().data() ) || url.host() == "apifree.forvo.com" );
}
inline bool isWebAudioUrl( QUrl const & url )
{
if ( !url.isValid() )
return false;
// Note: we check for forvo sound links explicitly, as they don't have extensions
return ( url.scheme() == "http" || url.scheme() == "https" )
&& ( Filetype::isNameOfSound( url.path().toUtf8().data() ) || url.host() == "apifree.forvo.com" );
}
/// Uses some heuristics to chop off the first domain name from the host name,
/// but only if it's not too base. Returns the resulting host name.
inline QString getHostBase( QString const & host )

View file

@ -1,10 +1,7 @@
#include "decompress.hh"
#include "zlib.h"
#include "bzlib.h"
#ifdef MAKE_ZIM_SUPPORT
#include "lzma.h"
#endif
#include <zlib.h>
#include <bzlib.h>
#include <lzma.h>
#define CHUNK_SIZE 2048
@ -74,8 +71,6 @@ string decompressBzip2( const char * bufptr, unsigned length )
return str;
}
#ifdef MAKE_ZIM_SUPPORT
string decompressLzma2( const char * bufptr, unsigned length, bool raw_decoder )
{
string str;
@ -122,5 +117,3 @@ string decompressLzma2( const char * bufptr, unsigned length, bool raw_decoder )
}
return str;
}
#endif

View file

@ -12,10 +12,6 @@ string decompressZlib( const char * bufptr, unsigned length );
string decompressBzip2( const char * bufptr, unsigned length );
#ifdef MAKE_ZIM_SUPPORT
string decompressLzma2( const char * bufptr, unsigned length, bool raw_decoder = false );
#endif
#endif // DECOMPRESS_HH
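For illustration, a sketch that exercises the decompression helpers with the signatures shown above; the compressed buffer and the format flag are hypothetical. decompressLzma2 is now available unconditionally because this commit drops the MAKE_ZIM_SUPPORT guard.

#include "decompress.hh"

#include <string>

std::string inflateChunk( std::string const & compressed, bool isLzma )
{
  if ( isLzma )
    return decompressLzma2( compressed.data(), compressed.size() ); // raw_decoder defaults to false
  return decompressZlib( compressed.data(), compressed.size() );
}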

View file

@ -263,7 +263,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "AARD", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "AARD", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -302,12 +302,7 @@ AardDictionary::AardDictionary( string const & id, string const & indexFile, vec
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
AardDictionary::~AardDictionary()
@ -339,8 +334,8 @@ string AardDictionary::convert( const string & in )
char inCh, lastCh = 0;
bool afterEol = false;
for ( string::const_iterator i = in.begin(), j = in.end(); i != j; ++i ) {
inCh = *i;
for ( char i : in ) {
inCh = i;
if ( lastCh == '\\' ) {
inConverted.erase( inConverted.size() - 1 );
lastCh = 0;
@ -454,14 +449,14 @@ void AardDictionary::loadArticle( quint32 address, string & articleText, bool ra
string encodedLink;
encodedLink.reserve( link.size() );
bool prev = false;
for ( string::const_iterator i = link.begin(); i != link.end(); ++i ) {
if ( *i == '\\' ) {
for ( char i : link ) {
if ( i == '\\' ) {
if ( !prev ) {
prev = true;
continue;
}
}
encodedLink.push_back( *i );
encodedLink.push_back( i );
prev = false;
}
encodedLink =
@ -647,10 +642,10 @@ void AardArticleRequest::run()
vector< WordArticleLink > chain = dict.findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = dict.findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -665,22 +660,22 @@ void AardArticleRequest::run()
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
}
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Now grab that article
string headword, articleText;
headword = chain[ x ].word;
headword = x.word;
try {
dict.loadArticle( chain[ x ].articleOffset, articleText );
dict.loadArticle( x.articleOffset, articleText );
}
catch ( ... ) {
}
@ -699,7 +694,7 @@ void AardArticleRequest::run()
mapToUse.insert( pair( Folding::applySimpleCaseOnly( headword ), pair( headword, articleText ) ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
}
if ( mainArticles.empty() && alternateArticles.empty() ) {
@ -752,16 +747,16 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
// Skip files with the extensions different to .aar to speed up the
// scanning
if ( i->size() < 4 || strcasecmp( i->c_str() + ( i->size() - 4 ), ".aar" ) != 0 )
if ( fileName.size() < 4 || strcasecmp( fileName.c_str() + ( fileName.size() - 4 ), ".aar" ) != 0 )
continue;
// Got the file -- check if we need to rebuid the index
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
initializing.loadingDictionary( fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
string indexFile = indicesDir + dictId;
@ -769,17 +764,17 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
try {
gdDebug( "Aard: Building the index for dictionary: %s\n", i->c_str() );
gdDebug( "Aard: Building the index for dictionary: %s\n", fileName.c_str() );
{
QFileInfo info( QString::fromUtf8( i->c_str() ) );
QFileInfo info( QString::fromUtf8( fileName.c_str() ) );
if ( static_cast< quint64 >( info.size() ) > ULONG_MAX ) {
gdWarning( "File %s is too large\n", i->c_str() );
gdWarning( "File %s is too large\n", fileName.c_str() );
continue;
}
}
File::Class df( *i, "rb" );
File::Class df( fileName, "rb" );
AAR_header dictHeader;
@ -788,7 +783,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( strncmp( dictHeader.signature, "aard", 4 )
|| ( !has64bitIndex && strncmp( dictHeader.indexItemFormat, ">LL", 4 ) )
|| strncmp( dictHeader.keyLengthFormat, ">H", 2 ) || strncmp( dictHeader.articleLengthFormat, ">L", 2 ) ) {
gdWarning( "File %s is not in supported aard format\n", i->c_str() );
gdWarning( "File %s is not in supported aard format\n", fileName.c_str() );
continue;
}
@ -796,7 +791,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
quint32 size = qFromBigEndian( dictHeader.metaLength );
if ( size == 0 ) {
gdWarning( "File %s has invalid metadata", i->c_str() );
gdWarning( "File %s has invalid metadata", fileName.c_str() );
continue;
}
@ -809,7 +804,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
map< string, string > meta = parseMetaData( metaStr );
if ( meta.empty() ) {
gdWarning( "File %s has invalid metadata", i->c_str() );
gdWarning( "File %s has invalid metadata", fileName.c_str() );
continue;
}
@ -951,7 +946,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
idx.write( &idxHeader, sizeof( idxHeader ) );
}
catch ( std::exception & e ) {
gdWarning( "Aard dictionary indexing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Aard dictionary indexing failed: %s, error: %s\n", fileName.c_str(), e.what() );
continue;
}
catch ( ... ) {
@ -963,7 +958,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< AardDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "Aard dictionary initializing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Aard dictionary initializing failed: %s, error: %s\n", fileName.c_str(), e.what() );
continue;
}
}

View file

@ -225,7 +225,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "BGL", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "BGL", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -270,13 +270,7 @@ BglDictionary::BglDictionary( string const & id, string const & indexFile, strin
openIndex( IndexInfo( idxHeader.indexBtreeMaxElements, idxHeader.indexRootOffset ), idx, idxMutex );
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( getDictionaryFilenames(), ftsIdxName )
&& !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
void BglDictionary::loadIcon() noexcept
@ -405,13 +399,13 @@ void BglDictionary::getArticleText( uint32_t articleAddress, QString & headword,
wstring wstr = Utf8::decode( articleStr );
if ( getLangTo() == LangCoder::code2toInt( "he" ) ) {
for ( unsigned int i = 0; i < wstr.size(); i++ ) {
for ( char32_t & i : wstr ) {
if (
( wstr[ i ] >= 224 && wstr[ i ] <= 250 )
|| ( wstr[ i ] >= 192
&& wstr[ i ]
( i >= 224 && i <= 250 )
|| ( i >= 192
&& i
<= 210 ) ) // Hebrew chars encoded ecoded as windows-1255 or ISO-8859-8, or as vowel-points of windows-1255
wstr[ i ] += 1488 - 224; // Convert to Hebrew unicode
i += 1488 - 224; // Convert to Hebrew unicode
}
}
@ -492,7 +486,7 @@ void BglHeadwordsRequest::run()
wstring caseFolded = Folding::applySimpleCaseOnly( str );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
@ -500,7 +494,7 @@ void BglHeadwordsRequest::run()
string headword, displayedHeadword, articleText;
dict.loadArticle( chain[ x ].articleOffset, headword, displayedHeadword, articleText );
dict.loadArticle( x.articleOffset, headword, displayedHeadword, articleText );
wstring headwordDecoded;
try {
@ -610,13 +604,13 @@ void BglArticleRequest::fixHebString( string & hebStr ) // Hebrew support - conv
return;
}
for ( unsigned int i = 0; i < hebWStr.size(); i++ ) {
for ( char32_t & i : hebWStr ) {
if (
( hebWStr[ i ] >= 224 && hebWStr[ i ] <= 250 )
|| ( hebWStr[ i ] >= 192
&& hebWStr[ i ]
( i >= 224 && i <= 250 )
|| ( i >= 192
&& i
<= 210 ) ) // Hebrew chars encoded ecoded as windows-1255 or ISO-8859-8, or as vowel-points of windows-1255
hebWStr[ i ] += 1488 - 224; // Convert to Hebrew unicode
i += 1488 - 224; // Convert to Hebrew unicode
}
hebStr = Utf8::encode( hebWStr );
}
@ -645,10 +639,10 @@ void BglArticleRequest::run()
static Language::Id hebrew = LangCoder::code2toInt( "he" ); // Hebrew support
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = dict.findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -666,7 +660,7 @@ void BglArticleRequest::run()
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
@ -674,14 +668,14 @@ void BglArticleRequest::run()
try {
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Now grab that article
string headword, displayedHeadword, articleText;
dict.loadArticle( chain[ x ].articleOffset, headword, displayedHeadword, articleText );
dict.loadArticle( x.articleOffset, headword, displayedHeadword, articleText );
// Ok. Now, does it go to main articles, or to alternate ones? We list
// main ones first, and alternates after.
@ -714,7 +708,7 @@ void BglArticleRequest::run()
mapToUse.insert( pair( Folding::applySimpleCaseOnly( headword ), pair( targetHeadword, articleText ) ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
} // try
catch ( std::exception & ex ) {
@ -863,8 +857,8 @@ void BglResourceRequest::run()
string nameLowercased = name;
for ( string::iterator i = nameLowercased.begin(); i != nameLowercased.end(); ++i )
*i = tolower( *i );
for ( char & i : nameLowercased )
i = tolower( i );
QMutexLocker _( &idxMutex );
@ -1020,15 +1014,15 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
// Skip files with the extensions different to .bgl to speed up the
// scanning
if ( i->size() < 4 || strcasecmp( i->c_str() + ( i->size() - 4 ), ".bgl" ) != 0 )
if ( fileName.size() < 4 || strcasecmp( fileName.c_str() + ( fileName.size() - 4 ), ".bgl" ) != 0 )
continue;
// Got the file -- check if we need to rebuid the index
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
@ -1037,10 +1031,10 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
// Building the index
gdDebug( "Bgl: Building the index for dictionary: %s\n", i->c_str() );
gdDebug( "Bgl: Building the index for dictionary: %s\n", fileName.c_str() );
try {
Babylon b( *i );
Babylon b( fileName );
if ( !b.open() )
continue;
@ -1048,7 +1042,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
std::string sourceCharset, targetCharset;
if ( !b.read( sourceCharset, targetCharset ) ) {
gdWarning( "Failed to start reading from %s, skipping it\n", i->c_str() );
gdWarning( "Failed to start reading from %s, skipping it\n", fileName.c_str() );
continue;
}
@ -1127,8 +1121,8 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
addEntryToIndex( e.headword, articleAddress, indexedWords, wcharBuffer );
for ( unsigned x = 0; x < e.alternates.size(); ++x )
addEntryToIndex( e.alternates[ x ], articleAddress, indexedWords, wcharBuffer );
for ( auto & alternate : e.alternates )
addEntryToIndex( alternate, articleAddress, indexedWords, wcharBuffer );
wordCount += 1 + e.alternates.size();
++articleCount;
@ -1152,12 +1146,10 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
idxHeader.resourceListOffset = idx.tell();
idxHeader.resourcesCount = resourceHandler.getResources().size();
for ( list< pair< string, uint32_t > >::const_iterator j = resourceHandler.getResources().begin();
j != resourceHandler.getResources().end();
++j ) {
idx.write< uint32_t >( j->first.size() );
idx.write( j->first.data(), j->first.size() );
idx.write< uint32_t >( j->second );
for ( const auto & j : resourceHandler.getResources() ) {
idx.write< uint32_t >( j.first.size() );
idx.write( j.first.data(), j.first.size() );
idx.write< uint32_t >( j.second );
}
// That concludes it. Update the header.
@ -1176,15 +1168,15 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
idx.write( &idxHeader, sizeof( idxHeader ) );
}
catch ( std::exception & e ) {
gdWarning( "BGL dictionary indexing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "BGL dictionary indexing failed: %s, error: %s\n", fileName.c_str(), e.what() );
}
}
try {
dictionaries.push_back( std::make_shared< BglDictionary >( dictId, indexFile, *i ) );
dictionaries.push_back( std::make_shared< BglDictionary >( dictId, indexFile, fileName ) );
}
catch ( std::exception & e ) {
gdWarning( "BGL dictionary initializing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "BGL dictionary initializing failed: %s, error: %s\n", fileName.c_str(), e.what() );
}
}

View file

@ -140,7 +140,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "DICTD", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "DICTD", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
};
@ -177,12 +177,8 @@ DictdDictionary::DictdDictionary( string const & id,
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
DictdDictionary::~DictdDictionary()
@ -254,10 +250,10 @@ sptr< Dictionary::DataRequest > DictdDictionary::getArticle( wstring const & wor
try {
vector< WordArticleLink > chain = findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -274,15 +270,15 @@ sptr< Dictionary::DataRequest > DictdDictionary::getArticle( wstring const & wor
char buf[ 16384 ];
for ( unsigned x = 0; x < chain.size(); ++x ) {
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
for ( auto & x : chain ) {
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Now load that article
{
QMutexLocker _( &indexFileMutex );
indexFile.seek( chain[ x ].articleOffset );
indexFile.seek( x.articleOffset );
if ( !indexFile.gets( buf, sizeof( buf ), true ) )
throw exFailedToReadLineFromIndex();
@ -382,16 +378,16 @@ sptr< Dictionary::DataRequest > DictdDictionary::getArticle( wstring const & wor
// We do the case-folded comparison here.
wstring headwordStripped = Folding::applySimpleCaseOnly( chain[ x ].word );
wstring headwordStripped = Folding::applySimpleCaseOnly( x.word );
if ( ignoreDiacritics )
headwordStripped = Folding::applyDiacriticsOnly( headwordStripped );
multimap< wstring, string > & mapToUse =
( wordCaseFolded == headwordStripped ) ? mainArticles : alternateArticles;
mapToUse.insert( pair( Folding::applySimpleCaseOnly( chain[ x ].word ), articleText ) );
mapToUse.insert( pair( Folding::applySimpleCaseOnly( x.word ), articleText ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
}
if ( mainArticles.empty() && alternateArticles.empty() )
@ -551,17 +547,17 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
// Only allow .index suffixes
if ( i->size() < 6 || strcasecmp( i->c_str() + ( i->size() - 6 ), ".index" ) != 0 )
if ( fileName.size() < 6 || strcasecmp( fileName.c_str() + ( fileName.size() - 6 ), ".index" ) != 0 )
continue;
try {
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
// Check if there is an 'abrv' file present
string baseName( *i, 0, i->size() - 5 );
string baseName( fileName, 0, fileName.size() - 5 );
dictFiles.push_back( string() );
@ -718,7 +714,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< DictdDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "Dictd dictionary \"%s\" reading failed, error: %s\n", i->c_str(), e.what() );
gdWarning( "Dictd dictionary \"%s\" reading failed, error: %s\n", fileName.c_str(), e.what() );
}
}

View file

@ -312,7 +312,7 @@ bool Class::loadIconFromText( QString iconUrl, QString const & text )
QFont font = painter.font();
//the text should be a little smaller than the icon
font.setPixelSize( iconSize * 0.6 );
font.setWeight( QFont::Black );
font.setWeight( QFont::Bold );
painter.setFont( font );
const QRect rectangle = QRect( 0, 0, iconSize, iconSize );
@ -320,6 +320,7 @@ bool Class::loadIconFromText( QString iconUrl, QString const & text )
//select a single char.
auto abbrName = getAbbrName( text );
painter.setPen( QColor( 4, 57, 108, 200 ) );
painter.drawText( rectangle, Qt::AlignCenter, abbrName );
painter.end();
@ -335,15 +336,20 @@ QString Class::getAbbrName( QString const & text )
{
if ( text.isEmpty() )
return QString();
//remove whitespace
//remove whitespace,number,mark,puncuation,symbol
QString simplified = text;
simplified.remove( QRegularExpression( "\\s" ) );
simplified.remove(
QRegularExpression( "[\\p{Z}\\p{N}\\p{M}\\p{P}\\p{S}]", QRegularExpression::UseUnicodePropertiesOption ) );
int index = qHash( simplified ) % simplified.size();
QString abbrName;
if ( !Utils::isCJKChar( simplified.at( index ).unicode() ) ) {
// take two chars.
abbrName = simplified.mid( index, 2 );
if ( abbrName.size() == 1 ) {
//make up two characters.
abbrName = abbrName + simplified.at( 0 );
}
}
else {
abbrName = simplified.mid( index, 1 );

View file

@ -312,6 +312,8 @@ protected:
QAtomicInt FTS_index_completed;
bool synonymSearchEnabled;
string dictionaryName;
//default to true;
bool enable_FTS = true;
// Load user icon if it exist
// By default set icon to empty
@ -369,6 +371,11 @@ public:
dictionaryName = _dictionaryName;
}
void setFtsEnable( bool _enable_FTS )
{
enable_FTS = _enable_FTS;
}
/// Returns all the available properties, like the author's name, copyright,
/// description etc. All strings are in utf8.
virtual map< Property, string > getProperties() noexcept = 0;
@ -541,6 +548,7 @@ public:
/// is useful to show in some kind of a splash screen.
/// The dictionaryName is in utf8.
virtual void indexingDictionary( string const & dictionaryName ) noexcept = 0;
virtual void loadingDictionary( string const & dictionaryName ) noexcept = 0;
virtual ~Initializing() = default;
};
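A minimal sketch of the per-dictionary full-text-search switch introduced above; the caller, its arguments and the config object are placeholders, while setFtsEnable and setFTSParameters come from this hunk.

void applyFtsSettings( Dictionary::Class & dict, Config::FullTextSearch const & fts, bool ftsWantedForThisDict )
{
  // New in this commit: a single dictionary can opt out of FTS regardless of
  // the global settings; each format's setFTSParameters now ANDs enable_FTS
  // into can_FTS.
  dict.setFtsEnable( ftsWantedForThisDict );
  dict.setFTSParameters( fts );
}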

View file

@ -276,8 +276,8 @@ QString const & DictServerDictionary::getDescription()
dictionaryDescription += "\n\n";
dictionaryDescription += QCoreApplication::translate( "DictServer", "Server databases" ) + " ("
+ QString::number( serverDatabases.size() ) + "):";
for ( QStringList::const_iterator i = serverDatabases.begin(); i != serverDatabases.end(); ++i )
dictionaryDescription += "\n" + *i;
for ( const auto & serverDatabase : serverDatabases )
dictionaryDescription += "\n" + serverDatabase;
}
}
return dictionaryDescription;
@ -838,14 +838,14 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::DictServers const
{
vector< sptr< Dictionary::Class > > result;
for ( int x = 0; x < servers.size(); ++x ) {
if ( servers[ x ].enabled )
result.push_back( std::make_shared< DictServerDictionary >( servers[ x ].id.toStdString(),
servers[ x ].name.toUtf8().data(),
servers[ x ].url,
servers[ x ].databases,
servers[ x ].strategies,
servers[ x ].iconFilename ) );
for ( const auto & server : servers ) {
if ( server.enabled )
result.push_back( std::make_shared< DictServerDictionary >( server.id.toStdString(),
server.name.toUtf8().data(),
server.url,
server.databases,
server.strategies,
server.iconFilename ) );
}
return result;

View file

@ -240,7 +240,7 @@ public:
if ( ensureInitDone().size() )
return;
can_FTS = fts.enabled && !fts.disabledTypes.contains( "DSL", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "DSL", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -298,13 +298,9 @@ DslDictionary::DslDictionary( string const & id,
articleNom( 0 ),
maxPictureWidth( maxPictureWidth_ )
{
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
// Read the dictionary name
idx.seek( sizeof( idxHeader ) );
@ -1750,6 +1746,8 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
|| File::tryPossibleName( baseName + "_ABRV.DSL.dz", abrvFileName ) )
dictFiles.push_back( abrvFileName );
initializing.loadingDictionary( fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
// See if there's a zip file with resources present. If so, include it.

View file

@ -1,32 +1,30 @@
/* This file is (c) 2014 Abs62
* Part of GoldenDict. Licensed under GPLv3 or later, see the LICENSE file */
#ifndef NO_EPWING_SUPPORT
#include "epwing_book.hh"
#include "epwing.hh"
#include "epwing_book.hh"
#include "epwing.hh"
#include <QByteArray>
#include <QDir>
#include <QRunnable>
#include <QSemaphore>
#include <QByteArray>
#include <QDir>
#include <QRunnable>
#include <QSemaphore>
#include <map>
#include <QtConcurrent>
#include <set>
#include <string>
#include <QObject>
#include <map>
#include <QtConcurrent>
#include <set>
#include <string>
#include "btreeidx.hh"
#include "folding.hh"
#include "gddebug.hh"
#include "btreeidx.hh"
#include "folding.hh"
#include "gddebug.hh"
#include "chunkedstorage.hh"
#include "wstring.hh"
#include "wstring_qt.hh"
#include "utf8.hh"
#include "filetype.hh"
#include "ftshelpers.hh"
#include "globalregex.hh"
#include "sptr.hh"
#include "chunkedstorage.hh"
#include "wstring_qt.hh"
#include "filetype.hh"
#include "ftshelpers.hh"
#include "globalregex.hh"
#include "sptr.hh"
namespace Epwing {
@ -157,7 +155,7 @@ public:
if ( ensureInitDone().size() )
return;
can_FTS = fts.enabled && !fts.disabledTypes.contains( "EPWING", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "EPWING", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -253,12 +251,8 @@ EpwingDictionary::EpwingDictionary( string const & id,
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
EpwingDictionary::~EpwingDictionary()
@ -1294,3 +1288,5 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
}
} // namespace Epwing
#endif

View file

@ -1,31 +1,33 @@
/* This file is (c) 2014 Abs62
* Part of GoldenDict. Licensed under GPLv3 or later, see the LICENSE file */
#include "epwing_book.hh"
#ifndef NO_EPWING_SUPPORT
#include <QDir>
#include <QTextStream>
#include <QTextDocumentFragment>
#include <QHash>
#include "gddebug.hh"
#include "epwing_book.hh"
#include "audiolink.hh"
#include "wstring.hh"
#include "wstring_qt.hh"
#include "folding.hh"
#include "epwing_charmap.hh"
#include "htmlescape.hh"
#if defined( Q_OS_WIN32 ) || defined( Q_OS_MAC )
#define _FILE_OFFSET_BITS 64
#endif
#include <QDir>
#include <QTextStream>
#include <QTextDocumentFragment>
#include <QHash>
#include "gddebug.hh"
#include <eb/text.h>
#include <eb/appendix.h>
#include <eb/error.h>
#include <eb/binary.h>
#include <eb/font.h>
#include "audiolink.hh"
#include "wstring.hh"
#include "wstring_qt.hh"
#include "folding.hh"
#include "epwing_charmap.hh"
#include "htmlescape.hh"
#if defined( Q_OS_WIN32 ) || defined( Q_OS_MAC )
#define _FILE_OFFSET_BITS 64
#endif
#define HitsBufferSize 512
#include <eb/text.h>
#include <eb/appendix.h>
#include <eb/error.h>
#include <eb/binary.h>
#include <eb/font.h>
#define HitsBufferSize 512
namespace Epwing {
@ -1875,3 +1877,5 @@ QMutex EpwingBook::libMutex;
} // namespace Book
} // namespace Epwing
#endif

View file

@ -1,7 +1,9 @@
/* This file is (c) 2014 Abs62
* Part of GoldenDict. Licensed under GPLv3 or later, see the LICENSE file */
#include "epwing_charmap.hh"
#ifndef NO_EPWING_SUPPORT
#include "epwing_charmap.hh"
namespace Epwing {
@ -25,3 +27,5 @@ void EpwingCharmap::addEntry( QString const & code, int ch )
}
} // namespace Epwing
#endif

View file

@ -119,8 +119,8 @@ ForvoArticleRequest::ForvoArticleRequest( wstring const & str,
addQuery( mgr, str );
for ( unsigned x = 0; x < alts.size(); ++x )
addQuery( mgr, alts[ x ] );
for ( const auto & alt : alts )
addQuery( mgr, alt );
}
void ForvoArticleRequest::addQuery( QNetworkAccessManager & mgr, wstring const & str )
@ -158,9 +158,9 @@ void ForvoArticleRequest::requestFinished( QNetworkReply * r )
bool found = false;
for ( NetReplies::iterator i = netReplies.begin(); i != netReplies.end(); ++i ) {
if ( i->reply.get() == r ) {
i->finished = true; // Mark as finished
for ( auto & netReplie : netReplies ) {
if ( netReplie.reply.get() == r ) {
netReplie.finished = true; // Mark as finished
found = true;
break;
}
@ -312,8 +312,8 @@ makeDictionaries( Dictionary::Initializing &, Config::Forvo const & forvo, QNetw
QSet< QString > usedCodes;
for ( int x = 0; x < codes.size(); ++x ) {
QString code = codes[ x ].simplified();
for ( const auto & x : codes ) {
QString code = x.simplified();
if ( code.size() && !usedCodes.contains( code ) ) {
// Generate id

View file

@ -408,7 +408,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "GLS", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "GLS", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -477,12 +477,8 @@ GlsDictionary::GlsDictionary( string const & id, string const & indexFile, vecto
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
GlsDictionary::~GlsDictionary()
@ -847,7 +843,7 @@ void GlsHeadwordsRequest::run()
wstring caseFolded = Folding::applySimpleCaseOnly( word );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
@ -856,7 +852,7 @@ void GlsHeadwordsRequest::run()
string articleText;
vector< string > headwords;
dict.loadArticleText( chain[ x ].articleOffset, headwords, articleText );
dict.loadArticleText( x.articleOffset, headwords, articleText );
wstring headwordDecoded = Utf8::decode( headwords.front() );
@ -936,10 +932,10 @@ void GlsArticleRequest::run()
try {
vector< WordArticleLink > chain = dict.findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = dict.findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -954,20 +950,20 @@ void GlsArticleRequest::run()
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
}
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Now grab that article
string headword, articleText;
dict.loadArticle( chain[ x ].articleOffset, headword, articleText );
dict.loadArticle( x.articleOffset, headword, articleText );
// Ok. Now, does it go to main articles, or to alternate ones? We list
// main ones first, and alternates after.
@ -984,7 +980,7 @@ void GlsArticleRequest::run()
mapToUse.insert(
pair( Folding::applySimpleCaseOnly( Utf8::decode( headword ) ), pair( headword, articleText ) ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
}
if ( mainArticles.empty() && alternateArticles.empty() ) {
@ -1196,24 +1192,24 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
// Try .gls and .gls.dz suffixes
if ( !( i->size() >= 4 && strcasecmp( i->c_str() + ( i->size() - 4 ), ".gls" ) == 0 )
&& !( i->size() >= 7 && strcasecmp( i->c_str() + ( i->size() - 7 ), ".gls.dz" ) == 0 ) )
if ( !( fileName.size() >= 4 && strcasecmp( fileName.c_str() + ( fileName.size() - 4 ), ".gls" ) == 0 )
&& !( fileName.size() >= 7 && strcasecmp( fileName.c_str() + ( fileName.size() - 7 ), ".gls.dz" ) == 0 ) )
continue;
unsigned atLine = 0; // Indicates current line in .gls, for debug purposes
try {
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
// See if there's a zip file with resources present. If so, include it.
string baseName =
( ( *i )[ i->size() - 4 ] == '.' ) ? string( *i, 0, i->size() - 4 ) : string( *i, 0, i->size() - 7 );
string baseName = ( fileName[ fileName.size() - 4 ] == '.' ) ? string( fileName, 0, fileName.size() - 4 ) :
string( fileName, 0, fileName.size() - 7 );
string zipFileName;
@ -1227,7 +1223,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile )
|| indexIsOldOrBad( indexFile, zipFileName.size() ) ) {
GlsScanner scanner( *i );
GlsScanner scanner( fileName );
try { // Here we intercept any errors during the read to save the line at
// which the incident happened. We need the scanner alive for that.
@ -1309,8 +1305,8 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
uint32_t articleSize = curOffset - articleOffset;
chunks.addToBlock( &articleSize, sizeof( articleSize ) );
for ( list< wstring >::iterator j = allEntryWords.begin(); j != allEntryWords.end(); ++j )
indexedWords.addWord( *j, descOffset );
for ( auto & allEntryWord : allEntryWords )
indexedWords.addWord( allEntryWord, descOffset );
++articleCount;
wordCount += allEntryWords.size();
@ -1395,7 +1391,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< GlsDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "GLS dictionary reading failed: %s:%u, error: %s\n", i->c_str(), atLine, e.what() );
gdWarning( "GLS dictionary reading failed: %s:%u, error: %s\n", fileName.c_str(), atLine, e.what() );
}
}

View file

@ -360,8 +360,8 @@ void HunspellHeadwordsRequest::run()
getSuggestionsForExpression( trimmedWord, results, hunspellMutex, hunspell );
QMutexLocker _( &dataMutex );
for ( unsigned i = 0; i < results.size(); i++ )
matches.push_back( results[ i ] );
for ( const auto & result : results )
matches.push_back( result );
}
else {
QVector< wstring > suggestions = suggest( trimmedWord, hunspellMutex, hunspell );
@ -369,8 +369,8 @@ void HunspellHeadwordsRequest::run()
if ( !suggestions.empty() ) {
QMutexLocker _( &dataMutex );
for ( int x = 0; x < suggestions.size(); ++x )
matches.push_back( suggestions[ x ] );
for ( const auto & suggestion : suggestions )
matches.push_back( suggestion );
}
}
@ -396,8 +396,8 @@ QVector< wstring > suggest( wstring & word, QMutex & hunspellMutex, Hunspell & h
static QRegExp cutStem( R"(^\s*st:(((\s+(?!\w{2}:)(?!-)(?!\+))|\S+)+))" );
for ( vector< string >::size_type x = 0; x < suggestions.size(); ++x ) {
QString suggestion = QString::fromStdU32String( decodeFromHunspell( hunspell, suggestions[ x ].c_str() ) );
for ( const auto & x : suggestions ) {
QString suggestion = QString::fromStdU32String( decodeFromHunspell( hunspell, x.c_str() ) );
// Strip comments
int n = suggestion.indexOf( '#' );
@ -562,11 +562,11 @@ void getSuggestionsForExpression( wstring const & expression,
QVector< wstring > results;
for ( int i = 0; i < words.size(); i++ ) {
word = words.at( i );
for ( const auto & i : words ) {
word = i;
if ( Folding::isPunct( word[ 0 ] ) || Folding::isWhitespace( word[ 0 ] ) ) {
for ( int j = 0; j < results.size(); j++ )
results[ j ].append( word );
for ( auto & result : results )
result.append( word );
}
else {
QVector< wstring > sugg = suggest( word, hunspellMutex, hunspell );
@ -594,9 +594,9 @@ void getSuggestionsForExpression( wstring const & expression,
}
}
for ( int i = 0; i < results.size(); i++ )
if ( results.at( i ) != trimmedWord )
suggestions.push_back( results.at( i ) );
for ( const auto & result : results )
if ( result != trimmedWord )
suggestions.push_back( result );
}
string encodeToHunspell( Hunspell & hunspell, wstring const & str )
@ -642,9 +642,9 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::Hunspell const & c
vector< DataFiles > dataFiles = findDataFiles( cfg.dictionariesPath );
for ( int x = 0; x < cfg.enabledDictionaries.size(); ++x ) {
for ( const auto & enabledDictionarie : cfg.enabledDictionaries ) {
for ( unsigned d = dataFiles.size(); d--; ) {
if ( dataFiles[ d ].dictId == cfg.enabledDictionaries[ x ] ) {
if ( dataFiles[ d ].dictId == enabledDictionarie ) {
// Found it
vector< string > dictFiles;

View file

@ -79,10 +79,10 @@ LoadDictionaries::LoadDictionaries( Config::Class const & cfg ):
<< "*.mdx"
<< "*.gls"
<< "*.gls.dz"
<< "*.slob"
#ifdef MAKE_ZIM_SUPPORT
<< "*.zim"
<< "*.zimaa"
<< "*.slob"
#endif
#ifndef NO_EPWING_SUPPORT
<< "*catalogs"
@ -93,8 +93,10 @@ LoadDictionaries::LoadDictionaries( Config::Class const & cfg ):
void LoadDictionaries::run()
{
try {
for ( const auto & path : paths )
for ( const auto & path : paths ) {
qDebug() << "handle path:" << path.path;
handlePath( path );
}
// Make soundDirs
{
@ -111,6 +113,23 @@ void LoadDictionaries::run()
dictionaries.insert( dictionaries.end(), hunspellDictionaries.begin(), hunspellDictionaries.end() );
}
// handle the custom dictionary name & fts option
for ( const auto & dict : dictionaries ) {
auto baseDir = dict->getContainingFolder();
if ( baseDir.isEmpty() )
continue;
auto filePath = Utils::Path::combine( baseDir, "metadata.toml" );
auto dictMetaData = Metadata::load( filePath.toStdString() );
if ( dictMetaData && dictMetaData->name ) {
dict->setName( dictMetaData->name.value() );
}
if ( dictMetaData && dictMetaData->fullindex ) {
dict->setFtsEnable( dictMetaData->fullindex.value() );
}
}
exceptionText.clear();
}
catch ( std::exception & e ) {
@ -157,28 +176,13 @@ void LoadDictionaries::handlePath( Config::Path const & path )
addDicts( ZipSounds::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this ) );
addDicts( Mdx::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this ) );
addDicts( Gls::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this ) );
addDicts( Slob::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this, maxHeadwordToExpand ) );
#ifdef MAKE_ZIM_SUPPORT
addDicts( Zim::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this, maxHeadwordToExpand ) );
addDicts( Slob::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this, maxHeadwordToExpand ) );
#endif
#ifndef NO_EPWING_SUPPORT
addDicts( Epwing::makeDictionaries( allFiles, Config::getIndexDir().toStdString(), *this ) );
#endif
//handle the custom dictionary name
for ( const auto & dict : dictionaries ) {
auto baseDir = dict->getContainingFolder();
if ( baseDir.isEmpty() )
continue;
auto filePath = Utils::Path::combine( baseDir, "metadata.toml" );
auto dictMetaData = Metadata::load( filePath.toStdString() );
if ( dictMetaData && dictMetaData->name ) {
dict->setName( dictMetaData->name.value() );
}
}
}
void LoadDictionaries::indexingDictionary( string const & dictionaryName ) noexcept
@ -186,6 +190,11 @@ void LoadDictionaries::indexingDictionary( string const & dictionaryName ) noexc
emit indexingDictionarySignal( QString::fromUtf8( dictionaryName.c_str() ) );
}
void LoadDictionaries::loadingDictionary( string const & dictionaryName ) noexcept
{
emit loadingDictionarySignal( QString::fromUtf8( dictionaryName.c_str() ) );
}
void loadDictionaries( QWidget * parent,
bool showInitially,
@ -203,6 +212,7 @@ void loadDictionaries( QWidget * parent,
LoadDictionaries loadDicts( cfg );
QObject::connect( &loadDicts, &LoadDictionaries::indexingDictionarySignal, &init, &Initializing::indexing );
QObject::connect( &loadDicts, &LoadDictionaries::loadingDictionarySignal, &init, &Initializing::loading );
QEventLoop localLoop;
@ -310,6 +320,6 @@ void loadDictionaries( QWidget * parent,
void doDeferredInit( std::vector< sptr< Dictionary::Class > > & dictionaries )
{
for ( unsigned x = 0; x < dictionaries.size(); ++x )
dictionaries[ x ]->deferredInit();
for ( const auto & dictionarie : dictionaries )
dictionarie->deferredInit();
}
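
The new metadata.toml pass above reads an optional per-dictionary override file from the dictionary's own folder. A hypothetical example of that file and of how its two fields get applied (field names taken from the hunk; the real parsing is done by Metadata::load and is not reproduced here):

#include <optional>
#include <string>

// Hypothetical mirror of the per-dictionary metadata consumed above.
// A metadata.toml placed next to the dictionary files might look like:
//
//   name = "My Renamed Dictionary"
//   fullindex = false
//
struct DictMetadata
{
  std::optional< std::string > name;  // overrides the display name
  std::optional< bool > fullindex;    // overrides whether full-text indexing is allowed
};

// Sketch of how the loaded metadata is applied to a dictionary object.
template< typename Dict >
void applyMetadata( Dict & dict, std::optional< DictMetadata > const & meta )
{
  if ( !meta )
    return;
  if ( meta->name )
    dict.setName( *meta->name );
  if ( meta->fullindex )
    dict.setFtsEnable( *meta->fullindex );
}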

View file

@ -48,6 +48,7 @@ public:
public:
virtual void indexingDictionary( std::string const & dictionaryName ) noexcept;
virtual void loadingDictionary( std::string const & dictionaryName ) noexcept;
private:
@ -58,6 +59,7 @@ private:
signals:
void indexingDictionarySignal( QString const & dictionaryName );
void loadingDictionarySignal( QString const & dictionaryName );
};
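
The new loadingDictionary() hook mirrors the existing indexingDictionary() one, so the startup dialog can also report which dictionary is currently being handled, not only which one is being re-indexed. A Qt-free sketch of that two-phase notification, using std::function in place of the signals (names here are illustrative, not the real API):

#include <functional>
#include <iostream>
#include <string>

// Illustrative only: the real code routes these through Qt signals
// (indexingDictionarySignal / loadingDictionarySignal) into the Initializing dialog.
struct ProgressSink
{
  std::function< void( std::string const & ) > indexing;
  std::function< void( std::string const & ) > loading;
};

void handleDictionary( std::string const & fileName, bool needsReindex, ProgressSink const & sink )
{
  sink.loading( fileName );      // "currently loading this dictionary"
  if ( needsReindex )
    sink.indexing( fileName );   // "rebuilding its index" (slow path)
}

int main()
{
  ProgressSink sink{ []( std::string const & n ) { std::cout << "indexing " << n << '\n'; },
                     []( std::string const & n ) { std::cout << "loading " << n << '\n'; } };
  handleDictionary( "sample.mdx", false, sink );
}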
/// Loads all dictionaries mentioned in the configuration passed, into the

View file

@ -209,10 +209,10 @@ sptr< Dictionary::DataRequest > LsaDictionary::getArticle( wstring const & word,
{
vector< WordArticleLink > chain = findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -227,8 +227,8 @@ sptr< Dictionary::DataRequest > LsaDictionary::getArticle( wstring const & word,
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
for ( auto & x : chain ) {
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Ok. Now, does it go to main articles, or to alternate ones? We list
@ -236,15 +236,15 @@ sptr< Dictionary::DataRequest > LsaDictionary::getArticle( wstring const & word,
// We do the case-folded comparison here.
wstring headwordStripped = Folding::applySimpleCaseOnly( chain[ x ].word );
wstring headwordStripped = Folding::applySimpleCaseOnly( x.word );
if ( ignoreDiacritics )
headwordStripped = Folding::applyDiacriticsOnly( headwordStripped );
multimap< wstring, string > & mapToUse = ( wordCaseFolded == headwordStripped ) ? mainArticles : alternateArticles;
mapToUse.insert( std::pair( Folding::applySimpleCaseOnly( chain[ x ].word ), chain[ x ].word ) );
mapToUse.insert( std::pair( Folding::applySimpleCaseOnly( x.word ), x.word ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
}
if ( mainArticles.empty() && alternateArticles.empty() )

View file

@ -261,7 +261,7 @@ public:
if ( !ensureInitDone().empty() )
return;
can_FTS = fts.enabled && !fts.disabledTypes.contains( "MDICT", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "MDICT", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -324,13 +324,8 @@ MdxDictionary::MdxDictionary( string const & id, string const & indexFile, vecto
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
cacheDirName = QDir::tempPath() + QDir::separator() + QString::fromUtf8( getId().c_str() ) + ".cache";
}
@ -1299,6 +1294,8 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
vector< string > dictFiles( 1, fileName );
findResourceFiles( fileName, dictFiles );
initializing.loadingDictionary( fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
string indexFile = indicesDir + dictId;

View file

@ -417,8 +417,8 @@ MediaWikiArticleRequest::MediaWikiArticleRequest( wstring const & str,
addQuery( mgr, str );
for ( unsigned x = 0; x < alts.size(); ++x )
addQuery( mgr, alts[ x ] );
for ( const auto & alt : alts )
addQuery( mgr, alt );
}
void MediaWikiArticleRequest::addQuery( QNetworkAccessManager & mgr, wstring const & str )
@ -455,9 +455,9 @@ void MediaWikiArticleRequest::requestFinished( QNetworkReply * r )
bool found = false;
for ( NetReplies::iterator i = netReplies.begin(); i != netReplies.end(); ++i ) {
if ( i->first == r ) {
i->second = true; // Mark as finished
for ( auto & netReplie : netReplies ) {
if ( netReplie.first == r ) {
netReplie.second = true; // Mark as finished
found = true;
break;
}
@ -558,7 +558,12 @@ void MediaWikiArticleRequest::requestFinished( QNetworkReply * r )
QRegularExpressionMatch match2 = reg2.match( tag );
if ( match2.hasMatch() ) {
QString ref = match2.captured( 1 );
QString audio_url = "<a href=\"" + ref
// the audio url may look like this: <a href="//upload.wikimedia.org/wikipedia/a.ogg"
if ( ref.startsWith( "//" ) ) {
ref = wikiUrl.scheme() + ":" + ref;
}
auto script = addAudioLink( "\"" + ref + "\"", this->dictPtr->getId() );
QString audio_url = QString::fromStdString( script ) + "<a href=\"" + ref
+ R"("><img src="qrc:///icons/playsound.png" border="0" align="absmiddle" alt="Play"/></a>)";
articleNewString += audio_url;
}
@ -571,14 +576,6 @@ void MediaWikiArticleRequest::requestFinished( QNetworkReply * r )
articleNewString.clear();
}
// audio url
articleString.replace(
QRegularExpression(
"<a\\s+href=\"(//upload\\.wikimedia\\.org/wikipedia/[^\"'&]*\\.og[ga](?:\\.mp3|))\"" ),
QString::fromStdString(
addAudioLink( string( "\"" ) + wikiUrl.scheme().toStdString() + ":\\1\"", this->dictPtr->getId() )
+ "<a href=\"" + wikiUrl.scheme().toStdString() + ":\\1\"" ) );
// Add url scheme to image source urls
articleString.replace( " src=\"//", " src=\"" + wikiUrl.scheme() + "://" );
@ -696,12 +693,12 @@ makeDictionaries( Dictionary::Initializing &, Config::MediaWikis const & wikis,
{
vector< sptr< Dictionary::Class > > result;
for ( int x = 0; x < wikis.size(); ++x ) {
if ( wikis[ x ].enabled )
result.push_back( std::make_shared< MediaWikiDictionary >( wikis[ x ].id.toStdString(),
wikis[ x ].name.toUtf8().data(),
wikis[ x ].url,
wikis[ x ].icon,
for ( const auto & wiki : wikis ) {
if ( wiki.enabled )
result.push_back( std::make_shared< MediaWikiDictionary >( wiki.id.toStdString(),
wiki.name.toUtf8().data(),
wiki.url,
wiki.icon,
mgr ) );
}
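
The replacement audio handling above no longer regex-rewrites the whole article; instead each matched link is checked for a protocol-relative URL and prefixed with the wiki's own scheme before addAudioLink() is applied. The core of that normalization, as a standalone sketch:

#include <QString>
#include <QUrl>

// Protocol-relative links ("//upload.wikimedia.org/...") inherit the scheme of
// the wiki they came from, e.g. "https:" + "//upload.wikimedia.org/....ogg".
QString absolutizeRef( QString ref, QUrl const & wikiUrl )
{
  if ( ref.startsWith( "//" ) )
    ref = wikiUrl.scheme() + ":" + ref;
  return ref;
}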

View file

@ -322,8 +322,8 @@ void ProgramWordSearchRequest::instanceFinished( QByteArray output, QString erro
output.replace( "\r\n", "\n" );
QStringList result = QString::fromUtf8( output ).split( "\n", Qt::SkipEmptyParts );
for ( int x = 0; x < result.size(); ++x )
matches.push_back( Dictionary::WordMatch( gd::toWString( result[ x ] ) ) );
for ( const auto & x : result )
matches.push_back( Dictionary::WordMatch( gd::toWString( x ) ) );
if ( !error.isEmpty() )
setErrorString( error );
@ -342,9 +342,9 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::Programs const & p
{
vector< sptr< Dictionary::Class > > result;
for ( Config::Programs::const_iterator i = programs.begin(); i != programs.end(); ++i )
if ( i->enabled )
result.push_back( std::make_shared< ProgramsDictionary >( *i ) );
for ( const auto & program : programs )
if ( program.enabled )
result.push_back( std::make_shared< ProgramsDictionary >( program ) );
return result;
}

View file

@ -177,7 +177,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "SDICT", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "SDICT", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -218,12 +218,8 @@ SdictDictionary::SdictDictionary( string const & id,
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
SdictDictionary::~SdictDictionary()
@ -259,8 +255,8 @@ string SdictDictionary::convert( string const & in )
bool afterEol = false;
for ( string::const_iterator i = in.begin(), j = in.end(); i != j; ++i ) {
switch ( *i ) {
for ( char i : in ) {
switch ( i ) {
case '\n':
afterEol = true;
inConverted.append( "<br/>" );
@ -274,7 +270,7 @@ string SdictDictionary::convert( string const & in )
// Fall-through
default:
inConverted.push_back( *i );
inConverted.push_back( i );
afterEol = false;
}
}
@ -490,10 +486,10 @@ void SdictArticleRequest::run()
vector< WordArticleLink > chain = dict.findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = dict.findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -508,23 +504,23 @@ void SdictArticleRequest::run()
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
}
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Now grab that article
string headword, articleText;
headword = chain[ x ].word;
headword = x.word;
try {
dict.loadArticle( chain[ x ].articleOffset, articleText );
dict.loadArticle( x.articleOffset, articleText );
// Ok. Now, does it go to main articles, or to alternate ones? We list
// main ones first, and alternates after.
@ -540,7 +536,7 @@ void SdictArticleRequest::run()
mapToUse.insert( pair( Folding::applySimpleCaseOnly( headword ), pair( headword, articleText ) ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
}
catch ( std::exception & ex ) {
gdWarning( "SDict: Failed loading article from \"%s\", reason: %s\n", dict.getName().c_str(), ex.what() );
@ -662,15 +658,15 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
// Skip files whose extension is not .dct, to speed up the
// scanning
if ( i->size() < 4 || strcasecmp( i->c_str() + ( i->size() - 4 ), ".dct" ) != 0 )
if ( fileName.size() < 4 || strcasecmp( fileName.c_str() + ( fileName.size() - 4 ), ".dct" ) != 0 )
continue;
// Got the file -- check if we need to rebuild the index
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
@ -678,15 +674,15 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
try {
gdDebug( "SDict: Building the index for dictionary: %s\n", i->c_str() );
gdDebug( "SDict: Building the index for dictionary: %s\n", fileName.c_str() );
File::Class df( *i, "rb" );
File::Class df( fileName, "rb" );
DCT_header dictHeader;
df.read( &dictHeader, sizeof( dictHeader ) );
if ( strncmp( dictHeader.signature, "sdct", 4 ) ) {
gdWarning( "File \"%s\" is not valid SDictionary file", i->c_str() );
gdWarning( "File \"%s\" is not valid SDictionary file", fileName.c_str() );
continue;
}
int compression = dictHeader.compression & 0x0F;
@ -781,7 +777,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
idx.write( &idxHeader, sizeof( idxHeader ) );
}
catch ( std::exception & e ) {
gdWarning( "Sdictionary dictionary indexing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Sdictionary dictionary indexing failed: %s, error: %s\n", fileName.c_str(), e.what() );
continue;
}
catch ( ... ) {
@ -793,7 +789,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< SdictDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "Sdictionary dictionary initializing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Sdictionary dictionary initializing failed: %s, error: %s\n", fileName.c_str(), e.what() );
}
}
return dictionaries;

View file

@ -1,50 +1,47 @@
/* This file is (c) 2015 Abs62
* Part of GoldenDict. Licensed under GPLv3 or later, see the LICENSE file */
#ifdef MAKE_ZIM_SUPPORT
#include "slob.hh"
#include "btreeidx.hh"
#include "slob.hh"
#include "btreeidx.hh"
#include "folding.hh"
#include "gddebug.hh"
#include "utf8.hh"
#include "decompress.hh"
#include "langcoder.hh"
#include "wstring.hh"
#include "wstring_qt.hh"
#include "ftshelpers.hh"
#include "htmlescape.hh"
#include "filetype.hh"
#include "tiff.hh"
#include "utils.hh"
#include "folding.hh"
#include "gddebug.hh"
#include "utf8.hh"
#include "decompress.hh"
#include "langcoder.hh"
#include "wstring_qt.hh"
#include "ftshelpers.hh"
#include "htmlescape.hh"
#include "filetype.hh"
#include "tiff.hh"
#include "utils.hh"
#ifdef _MSC_VER
#include <stub_msvc.h>
#endif
#ifdef _MSC_VER
#include <stub_msvc.h>
#endif
#include <QString>
#include <QFile>
#include <QFileInfo>
#include <QDir>
#include <QTextCodec>
#include <QMap>
#include <QPair>
#include <QRegExp>
#if ( QT_VERSION >= QT_VERSION_CHECK( 6, 0, 0 ) )
#include <QtCore5Compat>
#endif
#include <QProcess>
#include <QVector>
#include <QtAlgorithms>
#include <QString>
#include <QFile>
#include <QFileInfo>
#include <QDir>
#include <QTextCodec>
#include <QMap>
#include <QPair>
#include <QRegExp>
#if ( QT_VERSION >= QT_VERSION_CHECK( 6, 0, 0 ) )
#include <QtCore5Compat>
#endif
#include <QProcess>
#include <QVector>
#include <QRegularExpression>
#include <QRegularExpression>
#include <string>
#include <vector>
#include <map>
#include <set>
#include <algorithm>
#include <string>
#include <vector>
#include <map>
#include <set>
#include <algorithm>
namespace Slob {
@ -643,7 +640,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "SLOB", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "SLOB", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -705,13 +702,8 @@ SlobDictionary::SlobDictionary( string const & id, string const & indexFile, vec
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
texCgiPath = Config::getProgramDataDir() + "/mimetex.cgi";
if ( QFileInfo( texCgiPath ).exists() ) {
QString dirName = QString::fromStdString( getId() );
@ -1186,10 +1178,10 @@ void SlobArticleRequest::run()
vector< WordArticleLink > chain = dict.findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = dict.findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -1204,14 +1196,13 @@ void SlobArticleRequest::run()
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
}
quint64 pos =
dict.getArticlePos( chain[ x ].articleOffset ); // Several "articleOffset" values may refer to one article
quint64 pos = dict.getArticlePos( x.articleOffset ); // Several "articleOffset" values may refer to one article
if ( articlesIncluded.find( pos ) != articlesIncluded.end() )
continue; // We already have this article in the body.
@ -1220,9 +1211,9 @@ void SlobArticleRequest::run()
string headword, articleText;
headword = chain[ x ].word;
headword = x.word;
try {
dict.loadArticle( chain[ x ].articleOffset, articleText );
dict.loadArticle( x.articleOffset, articleText );
}
catch ( ... ) {
}
@ -1386,17 +1377,17 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
// Skip files whose extension is not .slob, to speed up the
// scanning
QString firstName = QDir::fromNativeSeparators( i->c_str() );
QString firstName = QDir::fromNativeSeparators( fileName.c_str() );
if ( !firstName.endsWith( ".slob" ) )
continue;
// Got the file -- check if we need to rebuild the index
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
@ -1406,7 +1397,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
SlobFile sf;
gdDebug( "Slob: Building the index for dictionary: %s\n", i->c_str() );
gdDebug( "Slob: Building the index for dictionary: %s\n", fileName.c_str() );
sf.open( firstName );
@ -1498,7 +1489,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< SlobDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "Slob dictionary initializing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Slob dictionary initializing failed: %s, error: %s\n", fileName.c_str(), e.what() );
continue;
}
catch ( ... ) {
@ -1510,5 +1501,3 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
}
} // namespace Slob
#endif

View file

@ -1,9 +1,7 @@
#ifndef __SLOB_HH_INCLUDED__
#define __SLOB_HH_INCLUDED__
#ifdef MAKE_ZIM_SUPPORT
#include "dictionary.hh"
#include "dictionary.hh"
/// Support for the Slob dictionaries.
namespace Slob {
@ -18,6 +16,4 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
} // namespace Slob
#endif
#endif // __SLOB_HH_INCLUDED__

View file

@ -132,10 +132,10 @@ sptr< Dictionary::DataRequest > SoundDirDictionary::getArticle( wstring const &
{
vector< WordArticleLink > chain = findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -422,8 +422,8 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::SoundDirs const &
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( Config::SoundDirs::const_iterator i = soundDirs.begin(); i != soundDirs.end(); ++i ) {
QDir dir( i->path );
for ( const auto & soundDir : soundDirs ) {
QDir dir( soundDir.path );
if ( !dir.exists() )
continue; // No such dir, no dictionary then
@ -441,9 +441,9 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::SoundDirs const &
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
// Building the index
qDebug() << "Sounds: Building the index for directory: " << i->path;
qDebug() << "Sounds: Building the index for directory: " << soundDir.path;
initializing.indexingDictionary( i->name.toUtf8().data() );
initializing.indexingDictionary( soundDir.name.toUtf8().data() );
File::Class idx( indexFile, "wb" );
@ -488,10 +488,10 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::SoundDirs const &
}
dictionaries.push_back( std::make_shared< SoundDirDictionary >( dictId,
i->name.toUtf8().data(),
soundDir.name.toUtf8().data(),
indexFile,
dictFiles,
i->iconFilename ) );
soundDir.iconFilename ) );
}
return dictionaries;

View file

@ -205,7 +205,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "STARDICT", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "STARDICT", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -270,12 +270,7 @@ StardictDictionary::StardictDictionary( string const & id,
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
StardictDictionary::~StardictDictionary()
@ -427,8 +422,7 @@ private:
QString old;
while ( s.compare( old ) != 0 ) {
for ( int i = 0; i < TRANSLATE_TBL_SIZE; ++i ) {
PWSyntaxTranslate & a = t[ i ];
for ( auto & a : t ) {
s.replace( a.re(), a.replacement() );
}
old = s;
@ -1198,7 +1192,7 @@ void StardictHeadwordsRequest::run()
wstring caseFolded = Folding::applySimpleCaseOnly( word );
for ( unsigned x = 0; x < chain.size(); ++x ) {
for ( auto & x : chain ) {
if ( Utils::AtomicInt::loadAcquire( isCancelled ) ) {
finish();
return;
@ -1206,7 +1200,7 @@ void StardictHeadwordsRequest::run()
string headword, articleText;
dict.loadArticle( chain[ x ].articleOffset, headword, articleText );
dict.loadArticle( x.articleOffset, headword, articleText );
wstring headwordDecoded = Utf8::decode( headword );
@ -1290,10 +1284,10 @@ void StardictArticleRequest::run()
// If alts has more than 100 entries, it is very likely that the dictionary was badly produced or parsed.
if ( alts.size() < 100 ) {
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = dict.findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = dict.findArticles( alt, ignoreDiacritics );
if ( altChain.size() > 100 ) {
continue;
}
@ -1780,16 +1774,16 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
{
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
if ( i->size() < 4 || strcasecmp( i->c_str() + ( i->size() - 4 ), ".ifo" ) != 0 )
for ( const auto & fileName : fileNames ) {
if ( fileName.size() < 4 || strcasecmp( fileName.c_str() + ( fileName.size() - 4 ), ".ifo" ) != 0 )
continue;
try {
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
string idxFileName, dictFileName, synFileName;
findCorrespondingFiles( *i, idxFileName, dictFileName, synFileName );
findCorrespondingFiles( fileName, idxFileName, dictFileName, synFileName );
dictFiles.push_back( idxFileName );
dictFiles.push_back( dictFileName );
@ -1815,7 +1809,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
// Building the index
File::Class ifoFile( *i, "r" );
File::Class ifoFile( fileName, "r" );
Ifo ifo( ifoFile );
@ -1964,7 +1958,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< StardictDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "Stardict dictionary initializing failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Stardict dictionary initializing failed: %s, error: %s\n", fileName.c_str(), e.what() );
}
}
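
The PWSyntaxTranslate loop touched above keeps applying every regex rule until a full pass leaves the string unchanged. The same fixed-point idea in isolation (the rule table here is made up; the real one lives in stardict.cc):

#include <QRegularExpression>
#include <QString>
#include <utility>
#include <vector>

QString rewriteUntilStable( QString s, std::vector< std::pair< QRegularExpression, QString > > const & rules )
{
  QString old;
  while ( s != old ) {  // stop once a whole pass changes nothing
    old = s;
    for ( auto const & rule : rules )
      s.replace( rule.first, rule.second );
  }
  return s;
}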

View file

@ -63,8 +63,8 @@ sptr< Dictionary::WordSearchRequest > BaseTransliterationDictionary::findHeadwor
GD_DPRINTF( "alts = %u\n", (unsigned)alts.size() );
for ( unsigned x = 0; x < alts.size(); ++x )
result->getMatches().push_back( alts[ x ] );
for ( const auto & alt : alts )
result->getMatches().push_back( alt );
return result;
}

View file

@ -128,9 +128,9 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::VoiceEngines const
{
vector< sptr< Dictionary::Class > > result;
for ( Config::VoiceEngines::const_iterator i = voiceEngines.begin(); i != voiceEngines.end(); ++i ) {
if ( i->enabled )
result.push_back( std::make_shared< VoiceEnginesDictionary >( *i ) );
for ( const auto & voiceEngine : voiceEngines ) {
if ( voiceEngine.enabled )
result.push_back( std::make_shared< VoiceEnginesDictionary >( voiceEngine ) );
}
return result;

View file

@ -504,13 +504,13 @@ vector< sptr< Dictionary::Class > > makeDictionaries( Config::WebSites const & w
{
vector< sptr< Dictionary::Class > > result;
for ( int x = 0; x < ws.size(); ++x ) {
if ( ws[ x ].enabled )
result.push_back( std::make_shared< WebSiteDictionary >( ws[ x ].id.toUtf8().data(),
ws[ x ].name.toUtf8().data(),
ws[ x ].url,
ws[ x ].iconFilename,
ws[ x ].inside_iframe,
for ( const auto & w : ws ) {
if ( w.enabled )
result.push_back( std::make_shared< WebSiteDictionary >( w.id.toUtf8().data(),
w.name.toUtf8().data(),
w.url,
w.iconFilename,
w.inside_iframe,
mgr ) );
}

View file

@ -203,7 +203,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "XDXF", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "XDXF", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -297,12 +297,7 @@ XdxfDictionary::XdxfDictionary( string const & id, string const & indexFile, vec
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
XdxfDictionary::~XdxfDictionary()
@ -751,9 +746,9 @@ QString readXhtmlData( QXmlStreamReader & stream )
QXmlStreamAttributes attrs = stream.attributes();
for ( int x = 0; x < attrs.size(); ++x ) {
result += Utils::escape( attrs[ x ].name().toString() );
result += "=\"" + Utils::escape( attrs[ x ].value().toString() ) + "\"";
for ( const auto & attr : attrs ) {
result += Utils::escape( attr.name().toString() );
result += "=\"" + Utils::escape( attr.value().toString() ) + "\"";
}
result += ">";

View file

@ -82,8 +82,8 @@ string convert( string const & in,
bool afterEol = false;
for ( string::const_iterator i = in.begin(), j = in.end(); i != j; ++i ) {
switch ( *i ) {
for ( char i : in ) {
switch ( i ) {
case '\n':
afterEol = true;
if ( !isLogicalFormat )
@ -102,7 +102,7 @@ string convert( string const & in,
// Fall-through
default:
inConverted.push_back( *i );
inConverted.push_back( i );
afterEol = false;
}
}
@ -137,7 +137,7 @@ string convert( string const & in,
#else
auto setContentResult = dd.setContent( QByteArray::fromStdString( in_data ) );
if ( !setContentResult ) {
qWarning( "Xdxf2html error, xml parse failed: %s at %d,%d\n",
qWarning( "Xdxf2html error, xml parse failed: %s at %lld,%lld\n",
setContentResult.errorMessage.toStdString().c_str(),
setContentResult.errorLine,
setContentResult.errorColumn );

View file

@ -221,7 +221,7 @@ public:
void setFTSParameters( Config::FullTextSearch const & fts ) override
{
can_FTS = fts.enabled && !fts.disabledTypes.contains( "ZIM", Qt::CaseInsensitive )
can_FTS = enable_FTS && fts.enabled && !fts.disabledTypes.contains( "ZIM", Qt::CaseInsensitive )
&& ( fts.maxDictionarySize == 0 || getArticleCount() <= fts.maxDictionarySize );
}
@ -262,12 +262,7 @@ ZimDictionary::ZimDictionary( string const & id, string const & indexFile, vecto
// Full-text search parameters
can_FTS = true;
ftsIdxName = indexFile + Dictionary::getFtsSuffix();
if ( !Dictionary::needToRebuildIndex( dictionaryFiles, ftsIdxName ) && !FtsHelpers::ftsIndexIsOldOrBad( this ) )
FTS_index_completed.ref();
}
void ZimDictionary::loadIcon() noexcept

View file

@ -171,10 +171,10 @@ sptr< Dictionary::DataRequest > ZipSoundsDictionary::getArticle( wstring const &
{
vector< WordArticleLink > chain = findArticles( word, ignoreDiacritics );
for ( unsigned x = 0; x < alts.size(); ++x ) {
for ( const auto & alt : alts ) {
/// Make an additional query for each alt
vector< WordArticleLink > altChain = findArticles( alts[ x ], ignoreDiacritics );
vector< WordArticleLink > altChain = findArticles( alt, ignoreDiacritics );
chain.insert( chain.end(), altChain.begin(), altChain.end() );
}
@ -189,8 +189,8 @@ sptr< Dictionary::DataRequest > ZipSoundsDictionary::getArticle( wstring const &
if ( ignoreDiacritics )
wordCaseFolded = Folding::applyDiacriticsOnly( wordCaseFolded );
for ( unsigned x = 0; x < chain.size(); ++x ) {
if ( articlesIncluded.find( chain[ x ].articleOffset ) != articlesIncluded.end() )
for ( auto & x : chain ) {
if ( articlesIncluded.find( x.articleOffset ) != articlesIncluded.end() )
continue; // We already have this article in the body.
// Ok. Now, does it go to main articles, or to alternate ones? We list
@ -198,16 +198,16 @@ sptr< Dictionary::DataRequest > ZipSoundsDictionary::getArticle( wstring const &
// We do the case-folded comparison here.
wstring headwordStripped = Folding::applySimpleCaseOnly( chain[ x ].word );
wstring headwordStripped = Folding::applySimpleCaseOnly( x.word );
if ( ignoreDiacritics )
headwordStripped = Folding::applyDiacriticsOnly( headwordStripped );
multimap< wstring, uint32_t > & mapToUse =
( wordCaseFolded == headwordStripped ) ? mainArticles : alternateArticles;
mapToUse.insert( std::pair( Folding::applySimpleCaseOnly( chain[ x ].word ), chain[ x ].articleOffset ) );
mapToUse.insert( std::pair( Folding::applySimpleCaseOnly( x.word ), x.articleOffset ) );
articlesIncluded.insert( chain[ x ].articleOffset );
articlesIncluded.insert( x.articleOffset );
}
if ( mainArticles.empty() && alternateArticles.empty() )
@ -381,18 +381,18 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
(void)initializing;
vector< sptr< Dictionary::Class > > dictionaries;
for ( vector< string >::const_iterator i = fileNames.begin(); i != fileNames.end(); ++i ) {
for ( const auto & fileName : fileNames ) {
/// Only allow .zips extension
if ( i->size() < 5 || strcasecmp( i->c_str() + ( i->size() - 5 ), ".zips" ) != 0 )
if ( fileName.size() < 5 || strcasecmp( fileName.c_str() + ( fileName.size() - 5 ), ".zips" ) != 0 )
continue;
try {
vector< string > dictFiles( 1, *i );
vector< string > dictFiles( 1, fileName );
string dictId = Dictionary::makeDictionaryId( dictFiles );
string indexFile = indicesDir + dictId;
if ( Dictionary::needToRebuildIndex( dictFiles, indexFile ) || indexIsOldOrBad( indexFile ) ) {
gdDebug( "Zips: Building the index for dictionary: %s\n", i->c_str() );
gdDebug( "Zips: Building the index for dictionary: %s\n", fileName.c_str() );
File::Class idx( indexFile, "wb" );
IdxHeader idxHeader;
@ -406,27 +406,27 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
IndexedWords names, zipFileNames;
ChunkedStorage::Writer chunks( idx );
quint32 namesCount;
quint32 namesCount = 0;
IndexedZip zipFile;
if ( zipFile.openZipFile( QDir::fromNativeSeparators( i->c_str() ) ) )
if ( zipFile.openZipFile( QDir::fromNativeSeparators( fileName.c_str() ) ) )
zipFile.indexFile( zipFileNames, &namesCount );
if ( !zipFileNames.empty() ) {
for ( IndexedWords::iterator i = zipFileNames.begin(); i != zipFileNames.end(); ++i ) {
vector< WordArticleLink > links = i->second;
for ( unsigned x = 0; x < links.size(); x++ ) {
for ( auto & zipFileName : zipFileNames ) {
vector< WordArticleLink > links = zipFileName.second;
for ( auto & link : links ) {
// Save original name
uint32_t offset = chunks.startNewBlock();
uint16_t sz = links[ x ].word.size();
uint16_t sz = link.word.size();
chunks.addToBlock( &sz, sizeof( uint16_t ) );
chunks.addToBlock( links[ x ].word.c_str(), sz );
chunks.addToBlock( &links[ x ].articleOffset, sizeof( uint32_t ) );
chunks.addToBlock( link.word.c_str(), sz );
chunks.addToBlock( &link.articleOffset, sizeof( uint32_t ) );
// Remove extension for sound files (like in sound dirs)
wstring word = stripExtension( links[ x ].word );
wstring word = stripExtension( link.word );
if ( !word.empty() )
names.addWord( word, offset );
}
@ -464,7 +464,7 @@ vector< sptr< Dictionary::Class > > makeDictionaries( vector< string > const & f
dictionaries.push_back( std::make_shared< ZipSoundsDictionary >( dictId, indexFile, dictFiles ) );
}
catch ( std::exception & e ) {
gdWarning( "Zipped sounds pack reading failed: %s, error: %s\n", i->c_str(), e.what() );
gdWarning( "Zipped sounds pack reading failed: %s, error: %s\n", fileName.c_str(), e.what() );
}
}
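
Each entry written by the indexing loop above is a small fixed-layout record inside a chunk: a 16-bit length, the original file name, then the 32-bit offset inside the zip. A sketch of just that payload packing (ChunkedStorage::Writer does the real block management):

#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

// Illustrative only: [ uint16 length ][ name bytes ][ uint32 offset inside the zip ].
std::vector< char > packZipEntry( std::string const & name, uint32_t articleOffset )
{
  std::vector< char > block;
  uint16_t sz = static_cast< uint16_t >( name.size() );
  block.resize( sizeof sz + name.size() + sizeof articleOffset );
  char * p = block.data();
  std::memcpy( p, &sz, sizeof sz );
  p += sizeof sz;
  std::memcpy( p, name.data(), name.size() );
  p += name.size();
  std::memcpy( p, &articleOffset, sizeof articleOffset );
  return block;
}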

View file

@ -3,7 +3,6 @@
#include "audiooutput.hh"
#include "ffmpegaudio.hh"
#include <math.h>
#include <errno.h>
extern "C" {
@ -44,26 +43,17 @@ AudioService & AudioService::instance()
return a;
}
AudioService::AudioService()
{
// ao_initialize();
}
AudioService::~AudioService()
{
emit cancelPlaying( true );
// ao_shutdown();
}
void AudioService::playMemory( const char * ptr, int size )
{
emit cancelPlaying( false );
QByteArray audioData( ptr, size );
DecoderThread * thread = new DecoderThread( audioData, this );
connect( thread, &DecoderThread::error, this, &AudioService::error );
connect( this, &AudioService::cancelPlaying, thread, &DecoderThread::cancel, Qt::DirectConnection );
connect( thread, &QThread::finished, thread, &QObject::deleteLater );
thread = std::make_shared< DecoderThread >( audioData, this );
thread->start();
}
@ -73,55 +63,19 @@ void AudioService::stop()
emit cancelPlaying( false );
}
struct DecoderContext
{
enum {
kBufferSize = 32768
};
static QMutex deviceMutex_;
QAtomicInt & isCancelled_;
QByteArray audioData_;
QDataStream audioDataStream_;
AVFormatContext * formatContext_;
#if LIBAVCODEC_VERSION_MAJOR < 59
AVCodec * codec_;
#else
const AVCodec * codec_;
#endif
AVCodecContext * codecContext_;
AVIOContext * avioContext_;
AVStream * audioStream_;
// ao_device * aoDevice_;
AudioOutput * audioOutput;
bool avformatOpened_;
SwrContext * swr_;
DecoderContext( QByteArray const & audioData, QAtomicInt & isCancelled );
~DecoderContext();
bool openCodec( QString & errorString );
void closeCodec();
bool openOutputDevice( QString & errorString );
void closeOutputDevice();
bool play( QString & errorString );
bool normalizeAudio( AVFrame * frame, vector< uint8_t > & samples );
void playFrame( AVFrame * frame );
};
DecoderContext::DecoderContext( QByteArray const & audioData, QAtomicInt & isCancelled ):
isCancelled_( isCancelled ),
audioData_( audioData ),
audioDataStream_( audioData_ ),
formatContext_( NULL ),
codec_( NULL ),
codecContext_( NULL ),
avioContext_( NULL ),
audioStream_( NULL ),
formatContext_( nullptr ),
codec_( nullptr ),
codecContext_( nullptr ),
avioContext_( nullptr ),
audioStream_( nullptr ),
audioOutput( new AudioOutput ),
avformatOpened_( false ),
swr_( NULL )
swr_( nullptr )
{
}
@ -163,7 +117,7 @@ bool DecoderContext::openCodec( QString & errorString )
}
// Don't free buffer allocated here (if succeeded), it will be cleaned up automatically.
avioContext_ = avio_alloc_context( avioBuffer, kBufferSize, 0, &audioDataStream_, readAudioData, NULL, NULL );
avioContext_ = avio_alloc_context( avioBuffer, kBufferSize, 0, &audioDataStream_, readAudioData, nullptr, nullptr );
if ( !avioContext_ ) {
av_free( avioBuffer );
errorString = "avio_alloc_context() failed.";
@ -180,13 +134,13 @@ bool DecoderContext::openCodec( QString & errorString )
int ret = 0;
avformatOpened_ = true;
ret = avformat_open_input( &formatContext_, NULL, NULL, NULL );
ret = avformat_open_input( &formatContext_, nullptr, nullptr, nullptr );
if ( ret < 0 ) {
errorString = QString( "avformat_open_input() failed: %1." ).arg( avErrorString( ret ) );
return false;
}
ret = avformat_find_stream_info( formatContext_, NULL );
ret = avformat_find_stream_info( formatContext_, nullptr );
if ( ret < 0 ) {
errorString = QString( "avformat_find_stream_info() failed: %1." ).arg( avErrorString( ret ) );
return false;
@ -216,7 +170,7 @@ bool DecoderContext::openCodec( QString & errorString )
}
avcodec_parameters_to_context( codecContext_, audioStream_->codecpar );
ret = avcodec_open2( codecContext_, codec_, NULL );
ret = avcodec_open2( codecContext_, codec_, nullptr );
if ( ret < 0 ) {
errorString = QString( "avcodec_open2() failed: %1." ).arg( avErrorString( ret ) );
return false;
@ -234,7 +188,7 @@ bool DecoderContext::openCodec( QString & errorString )
codecContext_->channel_layout = layout;
}
swr_ = swr_alloc_set_opts( NULL,
swr_ = swr_alloc_set_opts( nullptr,
layout,
AV_SAMPLE_FMT_S16,
44100,
@ -242,10 +196,10 @@ bool DecoderContext::openCodec( QString & errorString )
codecContext_->sample_fmt,
codecContext_->sample_rate,
0,
NULL );
nullptr );
if ( !swr_ || swr_init( swr_ ) < 0 ) {
av_log( NULL, AV_LOG_ERROR, "Cannot create sample rate converter \n" );
av_log( nullptr, AV_LOG_ERROR, "Cannot create sample rate converter \n" );
swr_free( &swr_ );
return false;
}
@ -262,7 +216,7 @@ void DecoderContext::closeCodec()
if ( !formatContext_ ) {
if ( avioContext_ ) {
av_free( avioContext_->buffer );
avioContext_ = NULL;
avioContext_ = nullptr;
}
return;
}
@ -272,12 +226,12 @@ void DecoderContext::closeCodec()
if ( !avformatOpened_ ) {
if ( formatContext_ ) {
avformat_free_context( formatContext_ );
formatContext_ = NULL;
formatContext_ = nullptr;
}
if ( avioContext_ ) {
av_free( avioContext_->buffer );
avioContext_ = NULL;
avioContext_ = nullptr;
}
return;
}
@ -341,7 +295,7 @@ bool DecoderContext::play( QString & errorString )
}
/* flush the decoder */
packet->data = NULL;
packet->data = nullptr;
packet->size = 0;
int ret = avcodec_send_packet( codecContext_, packet );
while ( ret >= 0 ) {
@ -355,27 +309,36 @@ bool DecoderContext::play( QString & errorString )
return true;
}
void DecoderContext::stop()
{
if ( audioOutput ) {
audioOutput->stop();
audioOutput->deleteLater();
audioOutput = nullptr;
}
}
bool DecoderContext::normalizeAudio( AVFrame * frame, vector< uint8_t > & samples )
{
auto dst_freq = 44100;
auto dst_channels = codecContext_->channels;
int out_count = (int64_t)frame->nb_samples * dst_freq / frame->sample_rate + 256;
int out_size = av_samples_get_buffer_size( NULL, dst_channels, out_count, AV_SAMPLE_FMT_S16, 1 );
int out_size = av_samples_get_buffer_size( nullptr, dst_channels, out_count, AV_SAMPLE_FMT_S16, 1 );
samples.resize( out_size );
uint8_t * data[ 2 ] = { 0 };
uint8_t * data[ 2 ] = { nullptr };
data[ 0 ] = &samples.front();
auto out_nb_samples = swr_convert( swr_, data, out_count, (const uint8_t **)frame->extended_data, frame->nb_samples );
if ( out_nb_samples < 0 ) {
av_log( NULL, AV_LOG_ERROR, "converte fail \n" );
av_log( nullptr, AV_LOG_ERROR, "convert failed \n" );
return false;
}
else {
// qDebug( "out_count:%d, out_nb_samples:%d, frame->nb_samples:%d \n", out_count, out_nb_samples, frame->nb_samples );
}
int actual_size = av_samples_get_buffer_size( NULL, dst_channels, out_nb_samples, AV_SAMPLE_FMT_S16, 1 );
int actual_size = av_samples_get_buffer_size( nullptr, dst_channels, out_nb_samples, AV_SAMPLE_FMT_S16, 1 );
samples.resize( actual_size );
return true;
}
@ -394,19 +357,20 @@ void DecoderContext::playFrame( AVFrame * frame )
DecoderThread::DecoderThread( QByteArray const & audioData, QObject * parent ):
QThread( parent ),
isCancelled_( 0 ),
audioData_( audioData )
audioData_( audioData ),
d( audioData_, isCancelled_ )
{
}
DecoderThread::~DecoderThread()
{
isCancelled_.ref();
d.stop();
}
void DecoderThread::run()
{
QString errorString;
DecoderContext d( audioData_, isCancelled_ );
if ( !d.openCodec( errorString ) ) {
emit error( errorString );
@ -430,6 +394,7 @@ void DecoderThread::run()
void DecoderThread::cancel( bool waitUntilFinished )
{
isCancelled_.ref();
d.stop();
if ( waitUntilFinished )
this->wait();
}
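
The resampler set up above converts whatever the decoder produces into interleaved signed 16-bit audio at 44100 Hz for AudioOutput. A condensed sketch of that setup, assuming the pre-5.1 FFmpeg channel-layout API that the surrounding code still uses:

extern "C" {
#include <libavutil/channel_layout.h>
#include <libavutil/samplefmt.h>
#include <libswresample/swresample.h>
}

// Returns a ready-to-use resampler, or nullptr on failure (caller releases it with swr_free).
static SwrContext * makeS16Resampler( int64_t channelLayout, AVSampleFormat inFmt, int inRate )
{
  SwrContext * swr = swr_alloc_set_opts( nullptr,
                                         channelLayout,      // output layout = input layout
                                         AV_SAMPLE_FMT_S16,  // output format
                                         44100,              // output rate
                                         channelLayout,
                                         inFmt,
                                         inRate,
                                         0,
                                         nullptr );
  if ( !swr || swr_init( swr ) < 0 ) {
    swr_free( &swr );  // safe to call on nullptr
    return nullptr;
  }
  return swr;
}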

Some files were not shown because too many files have changed in this diff