Compare commits


No commits in common. "master" and "v8.5.1" have entirely different histories.

719 changed files with 17895 additions and 57124 deletions


@@ -1,5 +0,0 @@
{
"CdmUrls": [
"https://ollj0gz40d.execute-api.us-west-2.amazonaws.com/default/AudibleCdm"
]
}


@@ -6,14 +6,10 @@ labels: bug
assignees: ''
---
-PLEASE FILL OUT THE FOLLOWING. Bug reports with limited information or lacking an attached log file may get limited or delayed help.
-___
-## Describe the bug
+**Describe the bug**
A clear and concise description of what the bug is.
-## To Reproduce
+**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
@@ -21,23 +17,15 @@ Steps to reproduce the behavior:
3. Scroll down to '....'
4. See error
-## Expected behavior
+**Expected behavior**
A clear and concise description of what you expected to happen.
-## Screenshots
+**Screenshots**
If applicable, add screenshots to help explain your problem.
-## Platform
+**Platform**
[e.g. Windows 10, Windows 11, Mac, Linux (State distribution)]
-## Log Files
+**Log Files**
-Attach your Libation log file here. If your user folder contains the file "LibationCrash.log", attach that also.
+Attach your Libation log file here.
-**Default Log File Locations**
-|Platform|Folder|
-|-|-|
-|Windows|`%userprofile%\Libation`|
-|macOS|`~/Library/Application Support/Libation`|
-|Linux|`~/.local/share/Libation`|
-Alternatively, you can open the log folder from within Libation: open Libation's settings, and on the first tab click the 'Open log folder' button.


@@ -6,18 +6,6 @@ labels: enhancement
assignees: ''
---
-**No-go ideas**
-There are lots of great ideas and many are beyond what we intend to do for Libation. Some good ideas which we do not intend to pursue:
-* comprehensive api/cli
-* aax/audiobook import
-* bulk rename of existing files
-* general metadata/tag editor
-* playback features
-* web gui
-* supporting non-audible vendors
-* official docker support
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]


@@ -1,8 +0,0 @@
---
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@@ -1,132 +0,0 @@
# build-linux.yml
# Reusable workflow that builds the Linux and MacOS (x64 and arm64) versions of Libation.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
runs_on:
type: string
description: "The GitHub hosted runner to use"
required: true
OS:
type: string
description: >
The operating system targeted by the build.
There must be a corresponding Bundle_$OS.sh script file in ./Scripts
required: true
architecture:
type: string
description: "CPU architecture targeted by the build."
required: true
env:
DOTNET_CONFIGURATION: "Release"
DOTNET_VERSION: "9.0.x"
RELEASE_NAME: "chardonnay"
jobs:
build:
name: "${{ inputs.OS }}-${{ inputs.architecture }}"
runs-on: ${{ inputs.runs_on }}
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
env:
NUGET_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get version
id: get_version
run: |
inputVersion="${{ inputs.version_override }}"
if [[ "${#inputVersion}" -gt 0 ]]
then
version="${inputVersion}"
else
version="$(grep -Eio -m 1 '<Version>.*</Version>' ./Source/AppScaffolding/AppScaffolding.csproj | sed -r 's/<\/?Version>//g')"
fi
echo "version=${version}" >> "${GITHUB_OUTPUT}"
- name: Unit test
if: ${{ inputs.run_unit_tests }}
working-directory: ./Source
run: dotnet test
- name: Publish
id: publish
working-directory: ./Source
run: |
if [[ "${{ inputs.OS }}" == "MacOS" ]]
then
display_os="macOS"
RUNTIME_ID="osx-${{ inputs.architecture }}"
else
display_os="Linux"
RUNTIME_ID="linux-${{ inputs.architecture }}"
fi
OUTPUT="bin/Publish/${display_os}-${{ inputs.architecture }}-${{ env.RELEASE_NAME }}"
echo "display_os=${display_os}" >> $GITHUB_OUTPUT
echo "Runtime Identifier: $RUNTIME_ID"
echo "Output Directory: $OUTPUT"
dotnet publish \
LibationAvalonia/LibationAvalonia.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LibationAvalonia/Properties/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
LoadByOS/${display_os}ConfigApp/${display_os}ConfigApp.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LoadByOS/Properties/${display_os}ConfigApp/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
LibationCli/LibationCli.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LibationCli/Properties/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
HangoverAvalonia/HangoverAvalonia.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=HangoverAvalonia/Properties/PublishProfiles/${display_os}Profile.pubxml
- name: Build bundle
id: bundle
working-directory: ./Source/bin/Publish/${{ steps.publish.outputs.display_os }}-${{ inputs.architecture }}-${{ env.RELEASE_NAME }}
run: |
BUNDLE_DIR=$(pwd)
echo "Bundle dir: ${BUNDLE_DIR}"
cd ..
SCRIPT=../../../Scripts/Bundle_${{ inputs.OS }}.sh
chmod +rx ${SCRIPT}
${SCRIPT} "${BUNDLE_DIR}" "${{ steps.get_version.outputs.version }}" "${{ inputs.architecture }}"
artifact=$(ls ./bundle)
echo "artifact=${artifact}" >> "${GITHUB_OUTPUT}"
- name: Publish bundle
uses: actions/upload-artifact@v4
with:
name: ${{ steps.bundle.outputs.artifact }}
path: ./Source/bin/Publish/bundle/${{ steps.bundle.outputs.artifact }}
if-no-files-found: error
retention-days: 7


@@ -1,118 +0,0 @@
# build-windows.yml
# Reusable workflow that builds the Windows versions of Libation.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
architecture:
type: string
description: "CPU architecture targeted by the build."
required: true
env:
DOTNET_CONFIGURATION: "Release"
DOTNET_VERSION: "9.0.x"
jobs:
build:
name: "${{ matrix.os }}-${{ matrix.release_name }}-${{ inputs.architecture }}"
runs-on: windows-latest
env:
OUTPUT_NAME: "${{ matrix.os }}-${{ matrix.release_name }}-${{ inputs.architecture }}"
RUNTIME_ID: "win-${{ inputs.architecture }}"
strategy:
matrix:
os: [Windows]
ui: [Avalonia]
release_name: [chardonnay]
include:
- os: Windows
ui: WinForms
release_name: classic
prefix: Classic-
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
env:
NUGET_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get version
id: get_version
run: |
if ("${{ inputs.version_override }}".length -gt 0) {
$version = "${{ inputs.version_override }}"
} else {
$version = (Select-Xml -Path "./Source/AppScaffolding/AppScaffolding.csproj" -XPath "/Project/PropertyGroup/Version").Node.InnerXML.Trim()
}
"version=$version" >> $env:GITHUB_OUTPUT
- name: Unit test
if: ${{ inputs.run_unit_tests }}
working-directory: ./Source
run: dotnet test
- name: Publish
working-directory: ./Source
run: |
dotnet publish `
Libation${{ matrix.ui }}/Libation${{ matrix.ui }}.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=Libation${{ matrix.ui }}/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
LoadByOS/${{ matrix.os }}ConfigApp/${{ matrix.os }}ConfigApp.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=LoadByOS/${{ matrix.os }}ConfigApp/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
LibationCli/LibationCli.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:DefineConstants="${{ matrix.release_name }}" `
-p:PublishProfile=LibationCli/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
Hangover${{ matrix.ui }}/Hangover${{ matrix.ui }}.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=Hangover${{ matrix.ui }}/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
- name: Zip artifact
id: zip
working-directory: ./Source/bin/Publish
run: |
$bin_dir = "${{ env.OUTPUT_NAME }}\"
$delfiles = @(
"WindowsConfigApp.exe",
"WindowsConfigApp.runtimeconfig.json",
"WindowsConfigApp.deps.json"
)
foreach ($file in $delfiles){ if (test-path $bin_dir$file){ Remove-Item $bin_dir$file } }
$artifact="${{ matrix.prefix }}Libation.${{ steps.get_version.outputs.version }}-" + "${{ matrix.os }}".ToLower() + "-${{ matrix.release_name }}-${{ inputs.architecture }}"
"artifact=$artifact" >> $env:GITHUB_OUTPUT
Compress-Archive -Path "${bin_dir}*" -DestinationPath "$artifact.zip"
- name: Publish artifact
uses: actions/upload-artifact@v4
with:
name: ${{ steps.zip.outputs.artifact }}.zip
path: ./Source/bin/Publish/${{ steps.zip.outputs.artifact }}.zip
if-no-files-found: error
retention-days: 7


@@ -1,53 +0,0 @@
# build.yml
# Reusable workflow that builds Libation for all platforms.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
jobs:
windows:
strategy:
matrix:
architecture: [x64]
uses: ./.github/workflows/build-windows.yml
with:
version_override: ${{ inputs.version_override }}
run_unit_tests: ${{ inputs.run_unit_tests }}
architecture: ${{ matrix.architecture }}
linux:
strategy:
matrix:
OS: [Redhat, Debian]
architecture: [x64, arm64]
uses: ./.github/workflows/build-linux.yml
with:
version_override: ${{ inputs.version_override }}
runs_on: ubuntu-latest
OS: ${{ matrix.OS }}
architecture: ${{ matrix.architecture }}
run_unit_tests: ${{ inputs.run_unit_tests }}
macos:
strategy:
matrix:
architecture: [x64, arm64]
uses: ./.github/workflows/build-linux.yml
with:
version_override: ${{ inputs.version_override }}
runs_on: macos-latest
OS: MacOS
architecture: ${{ matrix.architecture }}
run_unit_tests: ${{ inputs.run_unit_tests }}


@@ -1,63 +0,0 @@
# docker.yml
# Reusable workflow that builds a docker image for Libation.
---
name: docker
on:
workflow_call:
inputs:
version:
type: string
description: "Version number"
required: true
release:
type: boolean
description: "Is this a release build?"
required: true
secrets:
docker_username:
required: true
docker_token:
required: true
jobs:
build_and_push:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
if: ${{ inputs.release }}
uses: docker/login-action@v3
with:
username: ${{ secrets.docker_username }}
password: ${{ secrets.docker_token }}
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
latest=true
images: |
name=${{ secrets.docker_username }}/libation
tags: |
type=raw,value=${{ inputs.version }},enable=${{ inputs.release }}
- name: Build and push image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64
push: ${{ steps.metadata.outputs.tags != ''}}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}


@@ -1,58 +0,0 @@
# release.yml
# Builds and creates the release on any tags starting with a `v`
---
name: release
on:
push:
tags:
- "v*"
jobs:
prerelease:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.get_version.outputs.version }}
steps:
- name: Get tag version
id: get_version
run: |
export TAG="${{ github.ref_name }}"
echo "version=${TAG#v}" >> "${GITHUB_OUTPUT}"
docker:
needs: [prerelease]
uses: ./.github/workflows/docker.yml
with:
version: ${{ needs.prerelease.outputs.version }}
release: true
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
build:
needs: [prerelease]
uses: ./.github/workflows/build.yml
with:
version_override: ${{ needs.prerelease.outputs.version }}
run_unit_tests: false
release:
needs: [prerelease, build]
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v5
with:
path: artifacts
pattern: "*(Classic-)Libation.*"
- name: Release
id: release
uses: softprops/action-gh-release@v2
with:
name: Libation ${{ needs.prerelease.outputs.version }}
body: <Put a body here>
token: ${{ secrets.GITHUB_TOKEN }}
draft: true
prerelease: false
files: |
artifacts/*/*


@@ -1,22 +0,0 @@
name: Validate MetaInfo
"on":
pull_request:
branches: ["master"]
paths:
- .github/workflows/validate-appstream-metainfo.yml
- Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
push:
branches: ["master"]
paths:
- .github/workflows/validate-appstream-metainfo.yml
- Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
jobs:
validate-appstream-metainfo:
runs-on: ubuntu-latest
container:
image: ghcr.io/flathub/flatpak-builder-lint:latest
steps:
- uses: actions/checkout@v5
- name: Check the MetaInfo file
run: flatpak-builder-lint appstream Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml


@@ -1,21 +0,0 @@
name: Check desktop file
"on":
pull_request:
branches: ["master"]
paths:
- .github/workflows/validate-desktop-file.yml
- Source/LoadByOS/LinuxConfigApp/Libation.desktop
push:
branches: ["master"]
paths:
- .github/workflows/validate-desktop-file.yml
- Source/LoadByOS/LinuxConfigApp/Libation.desktop
jobs:
validate-desktop-file:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- run: sudo apt --yes install desktop-file-utils
- name: Check the desktop file
run: desktop-file-validate Source/LoadByOS/LinuxConfigApp/Libation.desktop


@@ -1,22 +0,0 @@
# validate.yml
# Validates that Libation will build on a pull request or push to master.
---
name: validate
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
build:
uses: ./.github/workflows/build.yml
docker:
uses: ./.github/workflows/docker.yml
with:
version: ${GITHUB_SHA}
release: false
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_token: ${{ secrets.DOCKERHUB_TOKEN }}


@@ -1,10 +1,6 @@
{
-"WindowsClassic": "Classic-Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-classic-x64\\.zip",
+"WindowsClassic": "Libation\\.\\d+\\.\\d+\\.\\d+-win-classic\\.zip",
-"WindowsAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-chardonnay-x64\\.zip",
+"WindowsAvalonia":"Libation\\.\\d+\\.\\d+\\.\\d+-win-chardonnay\\.zip",
-"LinuxAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.deb",
+"LinuxAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+-linux-chardonnay",
-"LinuxAvalonia_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.rpm",
+"MacOSAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+-macos-chardonnay"
-"MacOSAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-x64\\.tgz",
-"LinuxAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.deb",
-"LinuxAvalonia_Arm64_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.rpm",
-"MacOSAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-arm64\\.tgz"
}

.vscode/launch.json (vendored): 32 changed lines

@@ -1,32 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (console) Windows",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
"args": [],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "internalConsole"
},
{
"name": ".NET Core Launch (console) Linux",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build_linux",
"program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
"args": [],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "internalConsole"
}
]
}

.vscode/tasks.json (vendored): 59 changed lines

@@ -1,59 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"dependsOn": [
"build_libation",
"build_linuxconfigapp"
]
},
{
"label": "build_libation",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
},
{
"label": "build_linuxconfigapp",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LoadByOS/LinuxConfigApp/LinuxConfigApp.csproj"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
},
{
"label": "build_linux",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj",
"-p:TargetFramework=net9.0",
"-p:TargetFrameworks=net9.0",
"-p:RuntimeIdentifier=linux-x64"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}


@@ -1,3 +0,0 @@
{
"LibationFiles": "/config-internal"
}


@@ -1,174 +0,0 @@
#!/bin/bash
error() {
log "ERROR" "$1"
}
warn() {
log "WARNING" "$1"
}
info() {
log "info" "$1"
}
debug() {
if [ "${LOG_LEVEL}" = "debug" ]; then
log "debug" "$1"
fi
}
log() {
LEVEL=$1
MESSAGE=$2
printf "$(date '+%F %T') %s: %s\n" "${LEVEL}" "${MESSAGE}"
}
init_config_file() {
FILE=$1
FULLPATH=${LIBATION_CONFIG_DIR}/${FILE}
if [ -f ${FULLPATH} ]; then
info "loading ${FILE}"
cp ${FULLPATH} ${LIBATION_CONFIG_INTERNAL}/
return 0
else
warn "${FULLPATH} not found, creating empty file"
echo "{}" > ${LIBATION_CONFIG_INTERNAL}/${FILE}
return 1
fi
}
update_settings() {
FILE=$1
KEY=$2
VALUE=$3
info "setting ${KEY} to ${VALUE}"
echo $(jq --arg k "${KEY}" --arg v "${VALUE}" '.[$k] = $v' ${LIBATION_CONFIG_INTERNAL}/${FILE}) > ${LIBATION_CONFIG_INTERNAL}/${FILE}.tmp
mv ${LIBATION_CONFIG_INTERNAL}/${FILE}.tmp ${LIBATION_CONFIG_INTERNAL}/${FILE}
}
is_mounted() {
DIR=$1
if grep -qs "${DIR} " /proc/mounts;
then
return 0
else
return 1
fi
}
create_db() {
DBFILE=$1
if [ -f "${DBFILE}" ]; then
warn "preexisting database found when creating"
return 0
else
if ! touch "${DBFILE}"; then
error "unable to create database, check permissions on host"
exit 1
fi
return 1
fi
}
setup_db() {
DBPATH=$1
dbpattern="*.db"
debug "using database directory ${DBPATH}"
# Figure out the right database file
if [[ -z "${LIBATION_DB_FILE}" ]];
then
dbCount=$(find "${DBPATH}" -maxdepth 1 -type f -name "${dbpattern}" | wc -l)
if [ "${dbCount}" -gt 1 ];
then
error "too many database files found, set LIBATION_DB_FILE to the filename you wish to use"
exit 1
elif [ "${dbCount}" -eq 1 ];
then
files=( ${DBPATH}/${dbpattern} )
FILE=${files[0]}
else
FILE="${DBPATH}/LibationContext.db"
fi
else
FILE="${DBPATH}/${LIBATION_DB_FILE}"
fi
debug "planning to use database ${FILE}"
if [ -f "${FILE}" ]; then
info "database found at ${FILE}"
elif [ ${LIBATION_CREATE_DB} = "true" ];
then
warn "database not found, creating one at ${FILE}"
create_db ${FILE}
else
error "database not found and creation is disabled"
exit 1
fi
ln -s "${FILE}" "${LIBATION_CONFIG_INTERNAL}/LibationContext.db"
}
run() {
info "scanning accounts"
/libation/LibationCli scan
info "liberating books"
/libation/LibationCli liberate
}
main() {
info "initializing libation"
init_config_file AccountsSettings.json
init_config_file Settings.json
info "loading settings"
update_settings Settings.json Books "${LIBATION_BOOKS_DIR:-/data}"
update_settings Settings.json InProgress /tmp
info "loading database"
# If user provides a separate database mount, use that
if is_mounted "${LIBATION_DB_DIR}";
then
DB_LOCATION=${LIBATION_DB_DIR}
# Otherwise, use the config directory
else
DB_LOCATION=${LIBATION_CONFIG_DIR}
fi
setup_db ${DB_LOCATION}
# Try to warn if books dir wasn't mounted in
if ! is_mounted "${LIBATION_BOOKS_DIR}";
then
warn "${LIBATION_BOOKS_DIR} does not appear to be mounted, books will not be saved"
fi
# Let the user know what the run type will be
if [[ -z "${SLEEP_TIME}" ]]; then
SLEEP_TIME=-1
fi
if [ "${SLEEP_TIME}" == -1 ]; then
info "running once"
else
info "running every ${SLEEP_TIME}"
fi
# loop
while true
do
run
# Liberate only once if SLEEP_TIME was set to -1
if [ "${SLEEP_TIME}" == -1 ]; then
break
fi
sleep "${SLEEP_TIME}"
done
info "exiting"
}
main


@@ -1,39 +0,0 @@
# Dockerfile
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/dotnet/sdk:9.0 AS build
ARG TARGETARCH
COPY Source /Source
RUN dotnet publish \
/Source/LibationCli/LibationCli.csproj \
--arch ${TARGETARCH} \
--configuration Release \
--output /Source/bin/Publish/Linux-chardonnay \
-p:PublishProfile=/Source/LibationCli/Properties/PublishProfiles/LinuxProfile.pubxml
FROM mcr.microsoft.com/dotnet/runtime:9.0
ARG USER_UID=1001
ARG USER_GID=1001
# Set the character set that will be used for folder and filenames when liberating
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8
ENV SLEEP_TIME=-1
ENV LIBATION_CONFIG_INTERNAL=/config-internal
ENV LIBATION_CONFIG_DIR=/config
ENV LIBATION_DB_DIR=/db
ENV LIBATION_DB_FILE=
ENV LIBATION_CREATE_DB=true
ENV LIBATION_BOOKS_DIR=/data
RUN apt-get update && apt-get -y upgrade && \
apt-get install -y jq && \
mkdir -m777 ${LIBATION_CONFIG_INTERNAL} ${LIBATION_BOOKS_DIR}
COPY --from=build /Source/bin/Publish/Linux-chardonnay /libation
COPY Docker/* /libation
USER ${USER_UID}:${USER_GID}
CMD ["/libation/liberate.sh"]


@@ -1,6 +1,6 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
-### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)
+### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
@@ -9,10 +9,8 @@
- [Files and folders](#files-and-folders)
- [Settings](#settings)
-- [Custom File Naming](NamingTemplates.md)
+- [Custom File Naming](#custom-file-naming)
- [Command Line Interface](#command-line-interface)
-- [Custom Theme Colors](#custom-theme-colors) (Chardonnay Only)
-- [Audio Formats (Dolby Atmos, Widevine, Spatial Audio)](AudioFileFormats.md)
@@ -30,14 +28,11 @@ To make upgrades and reinstalls easier, Libation separates all of its responsibi
* Allow Libation to fix up audiobook metadata. After decrypting a title, Libation attempts to fix details like chapters and cover art. Some power users and/or control freaks prefer to manage this themselves. By unchecking this setting, Libation will only decrypt the book and will leave metadata as-is, warts and all.
-In addition to the options that are enabled if you allow Libation to "fix up" the audiobook, it does the following:
-* Adds the `TCOM` (`@wrt` in M4B files) metadata tag for the narrators.
-* Sets the `©gen` metadata tag for the genres.
-* Unescapes the copyright symbol (replaces `&#169;` with `©`)
-* Replaces the recording copyright `(P)` string with `℗`
-* Replaces the chapter markers embedded in the aax file with the chapter markers retrieved from Audible's API.
-* Sets the embedded cover art image with the 500x500 px cover art retrieved from Audible
+### Custom File Naming
+In Settings, on the Download/Decrypt tab, you can specify the format in which you want your files to be named. As you edit these templates, a live example will be shown. Parameters are listed for folders, files, and files split by chapter including an explanation of what each naming option means. For instance: you can use template `<title short> - <ch# 0> of <ch count> - <ch title>` to create the file `A Study in Scarlet - 04 of 10 - A Flight for Life.m4b`.
+These templates apply to GUI and CLI.
### Command Line Interface
@@ -76,37 +71,4 @@ export library to file
libationcli export -p "C:\foo\bar\my.csv" -c
libationcli export -p "C:\foo\bar\my.xlsx" --xlsx
libationcli export -p "C:\foo\bar\my.xlsx" -x
-Set download statuses throughout the library based on whether each book's audio file can be found.
-Must include at least one flag: --downloaded, --not-downloaded.
-Downloaded: if the audio file can be found, set download status to 'Downloaded'.
-Not Downloaded: if the audio file cannot be found, set download status to 'Not Downloaded'.
-UI: Visible Books > Set 'Downloaded' status automatically. Visible books only. Prompts before saving changes.
-CLI: full library. No prompt.
-libationcli set-status -d
-libationcli set-status -n
-libationcli set-status -d -n
```
-### Custom Theme Colors
-In Libation Chardonnay (not Classic), you may adjust the app colors using the built-in theme editor. Open the Settings window (from the menu bar: Settings > Settings). On the "Important" settings tab, click "Edit Theme Colors".
-#### Theme Editor Window
-The theme editor has a list of style names and their currently assigned colors. To change a style color, click the color swatch in the left-hand column to open the color editor for that style. Observe the color changes in real time on the built-in preview panel on the right-hand side of the theme editor.
-You may import or export themes using the buttons at the bottom-left of the theme editor.
-"Cancel" or closing the window will revert any changes you've made in the theme editor.
-"Reset" will reset any changes you've made in the theme editor.
-"Defaults" will restore the application default colors for the active theme ("Light" or "Dark").
-"Save" will save the theme colors to the ChardonnayTheme.json file and close the editor.
-Note: you may only edit the currently applied theme ("Light" or "Dark").
-#### Video Walkthrough
-The video below demonstrates using the theme editor to make changes to the Dark theme color palette.
-[](https://github.com/user-attachments/assets/05c0cb7f-578f-4465-9691-77d694111349)


@@ -1,104 +0,0 @@
# Audio Formats Produced by Libation
Libation will download audio in a number of different audio formats, depending on the settings you choose within Libation and the per-title availability of audio formats from Audible. The Libation settings which affect the format downloaded by Libation are shown in the Settings menu screenshot below.
Notes:
- Audiobook file extensions are either `.m4b` or `.mp3`. Libation uses the `.m4b` file extension for all non-MP3 files, regardless of the audio codec contained therein. Some media players don't recognize the `.m4b` file extension and may require the extension be changed to `.m4a` or `.mp4`.
- Most (but not all) podcasts are delivered by Audible as native MP3 files. None of the following audio formats and settings discussions pertain to those podcasts because MP3s have no DRM, and those episodes are copied directly to their output folders.
![Audio format settings menu](images/AudioFormatSettings.png)
## Settings Summary
### Audio quality to request from Audible
Audiobooks can be requested from Audible as "Normal" quality or "High" quality, matching the settings in the Audible mobile apps. This setting affects the audio bitrate and, sometimes, the number of audio channels. This setting has no effect on the _audio codec_.
### Use Widevine DRM
When this setting is disabled, all audiobooks will be downloaded using Audible's in-house DRM (AAX(C)) in the [AAC-LC](#aac-lc) format.
When this setting is enabled, Libation will request audio files protected by Google's Widevine Digital Rights Management scheme, and two additional settings will be unlocked: [Request xHE-AAC Codec](#request-xhe-aac-codec) and [Request Spatial Audio](#request-spatial-audio) (explained further below).
If you don't enable either of those additional options, then enabling 'Use Widevine DRM' will have no practical effect in nearly all circumstances. Audiobooks will be downloaded in the same [AAC-LC](#aac-lc) format with the same bitrate and the same number of audio channels. On rare occasions, enabling 'Use Widevine DRM' without the other two options will result in audio files with a different bitrate.
### Request xHE-AAC Codec
Enable this setting to request audiobooks in the [xHE-AAC](#xhe-aac) format. This codec is generally better quality than the [AAC-LC](#aac-lc) codec at the same bitrate, but it isn't as commonly supported by media players, so you may have some difficulty playing these audiobooks. The highest bitrate version of some audiobooks is only available as [xHE-AAC](#xhe-aac).
### Request Spatial Audio
Enable this setting to request audiobooks in a "spatial" ([Dolby Atmos](#dolby-atmos)) audio format. If an audiobook is not available in a spatial format, it will instead be downloaded in the [xHE-AAC codec](#xhe-aac).
### Spatial audio codec
Choose whether spatial audiobooks are downloaded in the [E-AC-3](#e-ac-3) or [AC-4](#ac-4) format.
### Download my books in the original audio format (Lossless)
If selected, audiobooks will be downloaded and saved in the format delivered by Audible (which depends on the settings explained above). Libation will not change the audio.
### Download my books as .MP3 files (transcode if necessary).
If selected, Libation will decode [AAC-LC](#aac-lc), [xHE-AAC](#xhe-aac), and [E-AC-3](#e-ac-3) audiobooks and re-encode them as MP3s using the MP3 encoder settings ([read about LAME MP3 encoder settings](https://lame.sourceforge.io/lame_ui_example.php)). Note that Libation cannot convert [AC-4](#ac-4) audio to MP3.
# Audio Formats
## Traditional Mono and Stereo Formats
### AAC-LC
#### _Full Name_
Advanced Audio Coding - Low Complexity
#### _Description_
This is the base profile for AAC audio and has existed since AAC's initial release in 1997. It enjoys wide support on nearly every conceivable platform capable of playing digital audio, as ubiquitous as MP3.
If Widevine support is not enabled, or if the book is not available in the more high-definition formats, Libation will download audiobooks in this format.
### MP3
#### _Full Name_
MPEG-1 Audio Layer III or MPEG-2 Audio Layer III
#### _Description_
An older (released in 1991) but still nearly universally supported audio codec. Its audio quality is generally worse than AAC-LC at similar bitrates. Audible delivers some podcasts in MP3 format, but no audiobooks are natively available as MP3. Libation supports converting audiobooks delivered in other audio formats to MP3. Note that the MP3 format supports a maximum of two audio channels, so multichannel E-AC-3 audio will be downsampled to stereo or mono (depending on Libation's settings). [AC-4](#ac-4) cannot be converted to MP3.
### xHE-AAC
#### _Full Name_
Extended High-Efficiency Advanced Audio Coding
#### _Description_
This is a proprietary codec created by the [Fraunhofer Institute for Integrated Circuits IIS](https://www.iis.fraunhofer.de/en/ff/amm/broadcast-streaming/xheaac.html). It combines features of the HE-AAC v2 and the baseline USAC (Unified Speech and Audio Coding) profiles with the parts of the MPEG-D DRC Loudness Control Profile or Dynamic Range Control Profile. Therefore, USAC and xHE-AAC are not synonymous and should not be used interchangeably. A player capable of decoding USAC will not necessarily be able to decode xHE-AAC.
xHE-AAC boasts significantly higher quality audio at low bitrates. Though it has existed since at least 2016, playback support is still quite limited. FFmpeg has recently added partial decoder support for the USAC profiles, but it is insufficient to decode the xHE-AAC audio files acquired from Audible (due to FFmpeg's lack of support for MPEG Surround for Mono to Stereo Upmixing; ISO 23003-3:2012 §7.11)
Note that the xHE-AAC files authored by Audible have some USAC conformance errors including:
- Number of samples per frame not matching the UsacConfig coreCoderFrameLength value.
- Disagreement between stts and UsacFrame usacIndependencyFlag value.
- Stts indicating a frame is an immediate play-out frame, but USAC AudioPreRoll is absent.
## Dolby Atmos
Atmos is a surround sound technology that expands on existing surround sound systems by adding height channels as well as free-moving sound objects. Audible delivers Dolby Atmos in two formats: E-AC-3 and AC-4.
Your device's ability to play audio from these formats does not necessarily mean that the audio you are hearing is Atmos (spatial). For instance, downloading the AC-4 codec for Windows (links in the [Supported Media Players](#supported-media-players) section) will enable you to play AC-4 audiobooks, but you'll still need to download [Dolby Access](https://apps.microsoft.com/detail/9n0866fs04w8?hl=en-US&gl=US) and pay $15 to enable _Dolby Atmos For Headphones_. Please refer to [this comment](https://github.com/rmcrackan/Libation/pull/1331#discussion_r2268660524) for additional context.
### E-AC-3
#### _Full Name_
Dolby Digital Plus (a.k.a Enhanced AC-3, DDP, DD+, and EC-3)
#### _Description_
A proprietary digital audio compression scheme developed by Dolby Digital for the transport and storage of multichannel audio. This format can be extended to add support for Atmos, making the codec _Dolby Digital Plus Atmos_. _Dolby Digital Plus Atmos_ is backwards compatible with Dolby Digital Plus, so any media player capable of playing Dolby Digital Plus can play _Dolby Digital Plus Atmos_. Audible spatial audiobooks downloaded in the E-AC-3 format are _Dolby Digital Plus Atmos_. If they are played by a media player that supports Atmos, they will play as Atmos audio. If they are played by a media player that does not support Atmos, they will be played as traditional 5.1 surround audio.
### AC-4
#### _Full Name_
Dolby AC-4
#### _Description_
A proprietary audio compression technology developed by Dolby Digital for the transport and storage of audio channels and/or audio objects. Audible spatial audiobooks downloaded in the AC-4 format are 2-channel AC-4 Immersive Stereo (AC4-IMS) audio, intended for playback in headphones or earbuds (though apparently [not supported on Apple devices](https://github.com/rmcrackan/Libation/issues/996#issuecomment-3169574514)).
# Supported Media Players
Below is an incomplete matrix of codec support across various media players and platforms.
| Player | [AAC-LC](#aac-lc) | [xHE-AAC](#xhe-aac) | [E-AC-3](#e-ac-3) | [AC-4](#ac-4) |
| :--- | :---: | :---: | :---: | :---: |
|Windows Native Support|Yes|Yes<sup>1</sup>|Yes<sup>2,3</sup>|Yes<sup>4</sup>|
|macOS Native Support|Yes|Yes|Yes<sup>3</sup>| |
|Android Native Support<sup>5</sup>|Yes|Yes| | |
|FFmpeg (all platforms)|Yes|Yes<sup>6</sup>|Yes<sup>3</sup>||
|[VLC](https://www.videolan.org/vlc/) (Windows)|Yes| |Yes<sup>3</sup> | |
|[foobar2000](https://www.foobar2000.org/components) (Windows and Mac)|Yes|Yes<sup>7</sup> | | |
|[PotPlayer](https://potplayer.daum.net/) (Windows)|Yes|Yes|Yes<sup>3</sup>| |
|[Samsung Media Player](https://play.google.com/store/apps/details?id=com.sec.android.app.music)<sup>8</sup> (Samsung devices) |Yes|Yes|Yes|Yes|
1. Windows 11 22H2 and later
2. On Windows [prior to Windows 11, version 24H2](https://support.microsoft.com/en-us/windows/codecs-in-media-player-d5c2cdcd-83a2-4805-abb0-c6888138e456). You can still get the codec by running the following command from a Windows PowerShell console: `winget install --id 9nvjqjbdkn97`
3. As mentioned in the [Dolby Atmos](#dolby-atmos) section, just because a media player can play a file does not mean it's rendering Atmos. _Dolby Digital Plus Atmos_ is backwards compatible with _Dolby Digital Plus_, so media players which only support _Dolby Digital Plus_ will play E-AC-3 audio files as regular 5.1 surround without rendering the Atmos spatial qualities. Additional software or hardware support may be required for Dolby Atmos playback.
4. You can download the AC-4 codec for Windows from 3rd party sites like [Major Geeks](https://www.majorgeeks.com/files/details/dolby_ac_3ac_4_installer.html) and [Free-Codecs](https://www.free-codecs.com/dolby-ac-4-decoder_download.htm). Once you install the codec bundle from one of those sources, the Windows store app will keep it updated. Read more about the process [in this comment](https://github.com/rmcrackan/Libation/pull/1331#discussion_r2268660524).
5. All Android devices will support AAC-LC and xHE-AAC. Some manufacturers (such as Samsung) will include Dolby codecs for playing E-AC-3 and AC-4 audio.
6. Requires FFmpeg to be [built with fdk-aac](https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_aac). You will almost certainly not find pre-built binaries in the wild due to licensing restrictions.
7. Requires the [fdk-aac plugin](https://www.foobar2000.org/components/view/foo_pd_aac) (Windows only)
8. Requires audio file extensions to be `.m4a` or `.mp4`. Libation uses the `.m4b` file extension, so you must manually rename the audio file to `.m4a`.
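If you have many such files to rename, a minimal shell sketch along these lines can do it in bulk (the folder path is an example; adjust it to your own Books folder):

```
# Give every .m4b in the folder an .m4a extension so players that need .m4a/.mp4 can open it
for f in /path/to/Books/*.m4b; do
    mv -- "$f" "${f%.m4b}.m4a"
done
```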


@@ -1,76 +0,0 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
> [!WARNING]
> ## Breaking Changes
> * The docker image now runs as user 1001 and group 1001. Make sure that the permissions on your volumes allow user 1001 to read and write to them; see the User section below for other options or if you're not sure.
> * `SLEEP_TIME` is now set to `-1` by default. This means the image will run once and exit. If you were relying on the previous default, you'll need to explicitly set the `SLEEP_TIME` environment variable to `30m` to replicate the previous behavior.
> * The docker image now ignores the values in `Settings.json` for `Books` and `InProgress`. You can now change the folder that books are saved to by using the `LIBATION_BOOKS_DIR` environment variable.
# Disclaimer
The docker image is provided as-is. We hope it can be useful to you but it is not officially supported.
### Configuration
Configuration in Libation is handled by two files, `AccountsSettings.json` and `Settings.json`. These files can usually be found in the Libation folder in your user's home directory. The easiest way to configure these is to run the desktop version of Libation and then copy them into a folder, such as `/opt/libation/config`, that you'll volume mount into the image. `Settings.json` is technically optional, and, if not provided, Libation will run using the default settings. Additionally, the `Books` and `InProgress` settings in `Settings.json` will be ignored and the image will instead substitute its own values.
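As a sketch of that copy step, assuming a Linux host where the desktop app's Libation folder is at `~/.local/share/Libation` (the default location mentioned in the log-folder table above; the folder may live elsewhere on your setup, and all paths here are illustrative):

```
mkdir -p /opt/libation/config
cp ~/.local/share/Libation/AccountsSettings.json /opt/libation/config/
cp ~/.local/share/Libation/Settings.json /opt/libation/config/   # optional
```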
### Running
Once the configuration files are copied, the docker image can be run with the following command.
```
sudo docker run -d \
-v /opt/libation/config:/config \
-v /opt/libation/books:/data \
--name libation \
--restart=always \
rmcrackan/libation:latest
```
By default the container will scan for new books once and download any new ones. This is configurable by passing in a value for the `SLEEP_TIME` environment variable. For example, if you pass in `10m` it will keep running, scan for new books, and download them every 10 minutes.
```
sudo docker run -d \
-v /opt/libation/config:/config \
-v /opt/libation/books:/data \
-e SLEEP_TIME='10m' \
--name libation \
--restart=always \
rmcrackan/libation:latest
```
### Environment Variables
| Env Var | Default | Description |
| -------- | ------- | ----------- |
| SLEEP_TIME | -1 | Length of time to sleep before doing another scan/download. Set to -1 to run once. |
| LIBATION_BOOKS_DIR | /data | Folder where books will be saved |
| LIBATION_CONFIG_DIR | /config | Folder to read configuration from. |
| LIBATION_DB_DIR | /db | Optional folder to load database from. If not mounted, will load database from `LIBATION_CONFIG_DIR`. |
| LIBATION_DB_FILE | | Name of database file to load. By default it will look for all `.db` files and load one if there is only one present. |
| LIBATION_CREATE_DB | true | Whether or not the image should create a database file if none are found. |
### User
This docker image runs as user `1001`. In order for the image to function properly, user `1001` must be able to read and write the volumes that are mounted in. If they are not, you will see errors, including [sqlite error](#1060), [Microsoft.Data.Sqlite.SqliteException](#1110), [unable to open database file](#1113), [Microsoft.EntityFrameworkCore.DbUpdateException](#1049)
If you're not sure what your user number is, check the output of the `id` command. Docker should normally run with the number of the user who configured and ran it.
If you want to change the user the image runs as, you can specify `-u <uid>:<gid>`. For example, to run it as user `2000` and group `3000`, you could do the following:
```
sudo docker run -d \
-u 2000:3000 \
-v /opt/libation/config:/config \
-v /opt/libation/books:/data \
--name libation \
--restart=always \
rmcrackan/libation:latest
```
If the user it's running as is correct, and it still cannot write, be sure to check whether the files and/or folders might be owned by the wrong user. You can use the `chown` command to change the owner of the file to the correct user and group number, for example: `chown -R 1001:1001 /mnt/audiobooks /mnt/libation-config`
### Advanced Database Options
The docker image supports an optional database mount location defined by `LIBATION_DB_DIR`. This allows the database to be mounted as read/write, while allowing the rest of the configuration files to be mounted as read only. This is specifically useful if running in Kubernetes where you can use Configmaps and Secrets to define the configuration. If the `LIBATION_DB_DIR` is mounted, it will be used, otherwise it will look for the database in `LIBATION_CONFIG_DIR`. If it does not find the database in the expected location, it will attempt to make an empty database there.
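A sketch of that layout follows, assuming the database lives in `/opt/libation/db` on the host (the host paths and database file name are illustrative; the container-side mount points and `LIBATION_DB_FILE` are the ones documented in the table above):

```
sudo docker run -d \
  -v /opt/libation/config:/config:ro \
  -v /opt/libation/db:/db \
  -v /opt/libation/books:/data \
  -e LIBATION_DB_FILE='LibationContext.db' \
  --name libation \
  --restart=always \
  rmcrackan/libation:latest
```

Here the configuration is mounted read-only while `/db` stays writable, matching the split described above.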
### Getting help
As mentioned above: docker is not officially supported. I'm adding this at the bottom of the page for anyone serious enough to have read this far. If you've tried everything above and would still like help, you can open an [issue](https://github.com/rmcrackan/Libation/issues). Please include `[docker]` in the title. There are also some docker folks who have offered occasional assistance who you can tag within your issue: `@ducamagnifico` , `@wtanksleyjr` , `@CLHatch`.
**Reminder** that these are just friendly users who are sometimes around. They're *not* our customer support.


@@ -1,56 +0,0 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
# Frequently Asked Questions
## Q: Where can I get help for my specific problem?
**A:** [You can open an issue here](https://github.com/rmcrackan/Libation/issues) for bug reports, feature requests, or specialized help.
## Q: What's the difference between 'Classic' and 'Chardonnay'?
**A:** First and most importantly: Classic and Chardonnay have the exact same features.
* **Classic** is Windows only. Its older 'grey boxes' look has a compact design which allows for more information on the screen. Notably, Classic was written using an older, more mature technology which has built-in support for screenreaders.
* **Chardonnay** is available for Windows, Mac, and Linux. Its modern design has a more open look and feel.
## Q: Now that I've downloaded my books, how can I listen to them?
**A:** You can use any app which plays m4b files (or mp3 files if you used that setting). Here are just a few ideas. Disclaimer: I have no affiliation with any of these companies:
* iOS: [BookPlayer](https://apps.apple.com/us/app/bookplayer/id1138219998)
* iOS: [Bound](https://apps.apple.com/us/app/bound-audiobook-player/id1041727137)
* Android: [Smart AudioBook Player](https://play.google.com/store/apps/details?id=ak.alizandro.smartaudiobookplayer&hl=en_US&gl=US)
* Android: [Listen](https://play.google.com/store/apps/details?id=ru.litres.android.audio&hl=en_US&gl=US)
* Desktop: [VLC](https://www.videolan.org/)
* Windows Desktop: [Audibly](https://github.com/rstewa/Audibly) -- a desktop player built specifically for audiobooks
Self-hosting online:
* [audiobookshelf](https://www.audiobookshelf.org). On [reddit](https://www.reddit.com/r/audiobookshelf/)
* [plex](https://www.plex.tv/). Listen with [Prologue](https://prologue.audio/) (iOS)
## Q: I'm having trouble playing my non-spatial audiobook, how can I fix this?
**A:** If you enabled the [Request xHE-AAC Codec](AudioFileFormats.md#request-xhe-aac-codec) option in settings, then the audiobook is being downloaded in the [xHE-AAC codec](AudioFileFormats.md#xhe-aac) which isn't widely supported. You have two options:
1. Use a media player which supports the xHE-AAC codec. [See an incomplete list of media players which support xHE-AAC](AudioFileFormats.md#supported-media-players).
2. Disable the [Request xHE-AAC Codec](AudioFileFormats.md#request-xhe-aac-codec) option in settings and re-download the audiobook. This will cause Libation to download audiobooks in the [AAC-LC codec](AudioFileFormats.md#aac-lc), which enjoys near-universal media player support.
## Q: I'm having trouble playing my book with 4D, spatial audio, or Dolby Atmos, how can I fix this?
**A:** Spatial audiobooks are delivered in two formats: [E-AC-3](AudioFileFormats.md#e-ac-3) and [AC-4](AudioFileFormats.md#ac-4). [See an incomplete list of media players which support those codecs](AudioFileFormats.md#supported-media-players).
## Q: I'm having trouble logging in to my Brazil account.
**A:** For reasons known only to Jeff Bezos and God, Amazon and Audible Brazil handle logins slightly differently. The external browser login option is not possible for Brazil. [See this ticket for more details.](https://github.com/rmcrackan/Libation/issues/1103)
## Q: How do I use Libation with a South Africa account?
**A:** Like many countries, Amazon gives South Africa its own Amazon site. [Unlike many other regions](https://www.audible.com/ep/country-selector), there is no South Africa-specific Audible site. Use `US` for your region -- i.e. audible.com.
(Not exactly a *frequently* asked question but it's come up more than once)


@@ -1,6 +1,6 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
-### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)
+### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
@@ -15,27 +15,19 @@
- [Download PDF attachments](#download-pdf-attachments)
- [Details of downloaded files](#details-of-downloaded-files)
- [Export your library](#export-your-library)
-- [I still need help](#i-still-need-help)
### [Download Libation](https://github.com/rmcrackan/Libation/releases)
-##### Which version? Chardonnay vs Classic
-Nearly 100% of the difference is look and feel -- it's a matter of preference.
-Chardonnay has an updated look and will work and look the same on Windows, Mac, and Linux.
-Classic is Windows only. It has an older look because it's built with older, duller, and more mature technology. That technology has better built-in support for things like screen-reader accessibility.
### Installation
* Windows
-Extract the zip file to a folder and then run `Libation.exe` from inside of that folder. Do not put it in Program Files: the inability to edit files there causes problems with configuration and updating.
+Extract the zip file to a folder and then run `Libation.exe` from inside of that folder.
-* [Linux](InstallOnLinux.md)
+* [Ubuntu Linux (beta)](InstallOnLinux.md)
-* [MacOS](InstallOnMac.md)
+* [MacOS (beta)](InstallOnMac.md)
### Create Accounts
@@ -149,7 +141,3 @@ When you set up Libation, you'll specify a Books directory. Libation looks insid
![Export](images/Export.png)
Export your library to Excel, CSV, or JSON
-### I still need help
-[You can open an issue here](https://github.com/rmcrackan/Libation/issues) for bug reports, feature requests, or specialized help.


@@ -1,67 +1,100 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
-### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)
+### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
-## Packaging status
-[![Packaging status](https://repology.org/badge/vertical-allrepos/libation.svg)](https://repology.org/project/libation/versions)
-New Libation releases are automatically packed into `.deb` and `.rpm` packages and are available from the [Libation repository's releases page](https://github.com/rmcrackan/Libation/releases). Some limitations of the Linux release are:
-- Cannot customize how illegal filename characters are replaced.
-- The auto-update function is unavailable.
-Run these commands in your terminal to download and install Libation. **Make sure you replace** `X.X.X` with the latest Libation version and `ARCH` with your CPU's architecture (either `amd64` or `arm64`).
-### Debian
-```Console
-wget -O libation.deb https://github.com/rmcrackan/Libation/releases/download/vX.X.X/Libation.X.X.X-linux-chardonnay-ARCH.deb
-sudo apt install ./libation.deb
-```
-### Redhat and CentOS
-```Console
-wget -O libation.rpm https://github.com/rmcrackan/Libation/releases/download/vX.X.X/Libation.X.X.X-linux-chardonnay-ARCH.rpm
-sudo yum install ./libation.rpm
-```
-### Fedora
-```Console
-wget -O libation.rpm https://github.com/rmcrackan/Libation/releases/download/vX.X.X/Libation.X.X.X-linux-chardonnay-ARCH.rpm
-sudo dnf5 install ./libation.rpm
-```
----
-### Arch Linux
-```Console
-yay -S libation
-```
-This package is available on the [Arch User Repository](https://aur.archlinux.org/packages/libation); install it via your choice of [AUR helpers](https://wiki.archlinux.org/title/AUR_helpers).
-Thanks to [mhdi](https://aur.archlinux.org/account/mhdi) for taking care of AUR package maintenance.
-### NixOS
-- Install via `nix-shell`
-```Console
-nix-shell -p libation
-```
-A `nix-shell` will temporarily modify your $PATH environment variable. This can be used to try a piece of software before deciding to permanently install it.
-- Install via NixOS configuration
-```Console
-environment.systemPackages = [
-pkgs.libation
-];
-```
-Add the following Nix code to your NixOS configuration, usually located in `/etc/nixos/configuration.nix`.
-- On NixOS via `nix-env`
-```Console
-nix-env -iA nixos.libation
-```
-- On non-NixOS via `nix-env`
-```Console
-nix-env -iA nixpkgs.libation
-```
-Warning: Using `nix-env` permanently modifies a local profile of installed packages. This must be updated and maintained by the user in the same way as with a traditional package manager.
-Thanks to [TomaSajt](https://github.com/tomasajt) for taking care of Nix package maintenance.
-If your desktop uses GTK, you should now see Libation among your applications.
-Additionally, you may launch Libation, LibationCli, and Hangover (the Libation recovery app) from the command line using the 'libation', 'libationcli', and 'hangover' aliases respectively.
+# Run Libation on Ubuntu (Beta)
+This walkthrough should get you up and running with Libation on your Ubuntu machine.
+## Dependencies
+### FFMpeg (Optional)
+If you want to convert your audiobooks to mp3, install FFMpeg using the following command:
+```console
+sudo apt-get install -y ffmpeg
+```
+## Install Libation
+Download the most recent linux-64 binaries zip file and save it as `libation-linux-bin.zip`. Save the 'install-libation.sh' bash script to a file. From the terminal make the script file executable:
+<details>
+<summary>install-libation.sh</summary>
+```BASH
+#!/bin/bash
+FILE=$1
+if [ -z "$FILE" ]
+then echo "This script must be called with a the Libation Linux bin zip file as an argument."
+exit
+fi
+if [[ "$EUID" -ne 0 ]]
+then echo "Please run as root"
+exit
+fi
+if [ ! -f "$FILE" ]
+then echo "The file \"$FILE\" does not exist."
+exit
+fi
+echo "Extracting $FILE"
+FOLDER="$(dirname "$FILE")/libation_src"
+echo "$FOLDER"
+sudo -u $SUDO_USER unzip -q -o ${FILE} -d ${FOLDER}
+if [ $? -ne 0 ]
+then echo "Error unzipping ${FILE}"
+exit
+fi
+sudo -u $SUDO_USER chmod +700 ${FOLDER}/Libation
+sudo -u $SUDO_USER chmod +700 ${FOLDER}/Hangover
+sudo -u $SUDO_USER chmod +700 ${FOLDER}/LibationCli
+#Remove previous installation program files and sym link
+rm /usr/bin/Libation
+rm /usr/bin/Hangover
+rm /usr/bin/LibationCli
+rm /usr/bin/libationcli
+rm /usr/lib/libation -r
+#Copy install files, icon and desktop file
+cp ${FOLDER}/glass-with-glow_256.svg /usr/share/icons/hicolor/scalable/apps/libation.svg
+cp ${FOLDER}/Libation.desktop /usr/share/applications/Libation.desktop
+mv ${FOLDER}/ /usr/lib/libation
+chmod +666 /usr/share/icons/hicolor/scalable/apps/libation.svg
+gtk-update-icon-cache -f /usr/share/icons/hicolor/
+ln -s /usr/lib/libation/Libation /usr/bin/Libation
+ln -s /usr/lib/libation/Hangover /usr/bin/Hangover
+ln -s /usr/lib/libation/LibationCli /usr/bin/LibationCli
+ln -s /usr/lib/libation/LibationCli /usr/bin/libationcli
+echo "Done!"
+```
+</details>
+```console
+chmod +700 install-libation.sh
+```
+Then run the script with the libation binaries zipfile as an argument.
+```console
+sudo ./install-libation.sh libation-linux-bin.zip
+```
+You should now see Libation among your applications.
Report bugs to https://github.com/rmcrackan/Libation/issues

View File

@ -1,82 +1,40 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest) ## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us) ### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**. ...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
# Run Libation on MacOS # Run Libation on MacOS (Beta)
This walkthrough should get you up and running with Libation on your Mac. This walkthrough should get you up and running with Libation on your Mac.
## Supports macOS 13 (Ventura) and above
## Install Libation ## Install Libation
- Download the file from the latest release and extract it.
  - Apple Silicon (M1, M2, ...): `Libation.x.x.x-macOS-chardonnay-`**arm64**`.tgz`
  - Intel: `Libation.x.x.x-macOS-chardonnay-`**x64**`.tgz`
- Move the extracted Libation app bundle to your applications folder.
- Right-click on Libation and then click on open
- The first time, it will not immediately show you an option to open it. Just dismiss the dialog and do the same thing again (right-click -> open) then you will get an option to run the unsigned application. This takes about 10 seconds.
## If this doesn't work
You can add Libation as a safe app without touching Gatekeeper.
- Copy/paste/run the following command. Adjust the file path to the Libation.app on your computer if necessary.
```Console
xattr -r -d com.apple.quarantine ~/Downloads/Libation.app
```
- Close the terminal and use Libation!
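To double-check that the quarantine attribute is really gone before launching, you can list the bundle's extended attributes (optional; a quick sanity check only):
```Console
# Should show no com.apple.quarantine entry after the command above.
xattr -l ~/Downloads/Libation.app
```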
## If this still doesn't work
- Copy/paste/run the following command (you'll be prompted to enter your Mac password)
```Console
sudo spctl --master-disable && sudo spctl --add --label "Libation" /Applications/Libation.app && open /Applications/Libation.app && sudo spctl --master-enable
```
* Close the terminal and use Libation!
## "Apple can't check app for malicious software"
From: [How to Open Anyway](https://support.apple.com/guide/mac-help/apple-cant-check-app-for-malicious-software-mchleab3a043/mac):
* On your Mac, choose Apple menu > System Settings, then click Privacy & Security in the sidebar. (You may need to scroll down.)
* Go to Security, then click Open.
* Click Open Anyway. This button is available for about an hour after you try to open the app.
* Enter your login password, then click OK.
## Troubleshooting
If Libation fails to start after completing the above steps, try the following:
1. Right-click the Libation app in your applications folder and select _Show Package Contents_
2. Open the `Contents` folder and then the `MacOS` folder.
3. Find the file named `Libation`, right-click it, and then select _Open_.
Libation _should_ launch, and you should now be able to open Libation by just double-clicking the app bundle in your applications folder. Thanks [joseph-holland](https://github.com/rmcrackan/Libation/issues/327#issuecomment-1268993349)!
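If you prefer a terminal, the same workaround can be sketched as running the inner executable directly (equivalent to steps 1-3 above):
```Console
/Applications/Libation.app/Contents/MacOS/Libation
```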
Report bugs to https://github.com/rmcrackan/Libation/issues
## Running Hangover
Libation comes with a recovery app called Hangover. You can start it by running this command:
```Console
open /Applications/Libation.app --args hangover
```
## Running LibationCli
Libation comes with a command-line interface. Unfortunately, due to the way apps are sandboxed on macOS, its use is somewhat limited. To open a new sandboxed terminal in LibationCli's directory, run the following command:
```Console
open /Applications/Libation.app --args cli
```
To use LibationCli from an unsandboxed terminal, you must disable Gatekeeper again and run the program directly at `/Applications/Libation.app/Contents/MacOS/LibationCli`.
Then use `./LibationCli` to execute a command.
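For example (a sketch; the available verbs and options depend on your LibationCli version):
```Console
cd /Applications/Libation.app/Contents/MacOS
./LibationCli --help
```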
## Get Libation running on Mac
[Run Libation on MacOS](https://user-images.githubusercontent.com/37587114/219271379-a922e4e1-48a0-48e4-bd81-48aa1226a4f5.mp4)

View File

@ -1,64 +0,0 @@
# Development Environment Setup using Nix or Nix Flakes on Linux x86_64
[Nix flakes](https://nixos.wiki/wiki/Flakes) can be used to provide version controlled reproducible and cross-platform development environments. The key files are:
- `flake.nix`: Defines the flake inputs and outputs, including development shells.
- `shell.nix`: This file defines the dependencies and additionally adds support for the Impure `nix-shell` method. This is used by the flake to create the dev environment.
- `flake.lock`: Locks the versions of inputs for reproducibility.
---
## Prerequisites
- [Nix](https://nixos.org/download.html), the package manager, or NixOS installed on Linux (x86_64-linux)
- Optional: flakes support enabled.
---
## Using the Development Shell
You have two primary ways to enter the development shell with Nix:
### 1. Using `nix develop` (flake-native command)
This is the recommended way if you have Nix with flakes support. Flakes guarantee the versions of the dependencies, which are controlled through `flake.nix` and `flake.lock`.
```
nix develop
```
This will open a shell with all dependencies and environment variables configured as per `flake.nix`; only `x86_64-linux` systems are supported at this time.
---
### 2. Using `nix-shell` (that's why shell.nix is a separate file)
If you want to use the traditional `nix-shell` tooling, which uses your system's nixpkgs version:
```
nix-shell
```
This will drop you into the shell environment defined in `shell.nix`. Note that this is not a flake-native method and does not use the nixpkgs locked in `flake.lock`, so exact dependency versions are not guaranteed.
---
## What's inside the dev shell?
- The environment variables and packages configured in `shell.nix` will be available.
- The package set (`pkgs`) used aligns with the versions locked in `flake.lock` to ensure reproducibility.
---
## Example Workflow using flakes
```
# Navigate to the project root folder which contains the flake.nix, flake.lock and shell.nix files.
cd /home/user/dev/Libation
# Enter the flake development shell (Linux x86_64)
nix develop
# run VSCode or VSCodium from the current shell environment
code .
# Run or Debug using VSCode and VSCodium using the linux Launch configuration.
```
![Debug using VSCode and VSCodium](./images/StartingDebuggingInVSCode.png)
You can also build and run the application inside the shell.
```
dotnet build ./Source/LibationAvalonia/LibationAvalonia.csproj -p:TargetFrameworks=net9.0 -p:TargetFramework=net9.0 -p:RuntimeIdentifier=linux-x64
```
---
## Notes
- Leaving the current shell environment will drop all added dependencies, and you will not be able to run or debug the program unless your system has those dependencies defined globally.
- To exit the shell environment voluntarily use `exit` inside the shell.
- Ensure you have no conflicting `nix.conf` or `global.json` that might affect SDK versions or runtime identifiers.
- Keep your `flake.lock` file committed to ensure builds are reproducible for all collaborators.
---
## References
- [Nix Flakes - NixOS Wiki](https://nixos.wiki/wiki/Flakes)
- [Nix.dev - Introduction to Nix flakes](https://nix.dev/manual/nix/2.28/command-ref/new-cli/nix3-flake-init)
- [Nix-shell Manual](https://nixos.org/manual/nix/stable/command-ref/nix-shell.html)

View File

@ -1,178 +0,0 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
# Naming Templates
File and Folder names can be customized using Libation's built-in tag template naming engine. To edit how folder and file names are created, go to Settings \> Download/Decrypt and edit the naming templates. If you're splitting your audiobook into multiple files by chapter, you can also use a custom template to set each chapter's title metadata tag by editing the template in Settings \> Audio File Options.
These templates apply to both GUI and CLI.
# Table of Contents
- [Template Tags](#template-tags)
- [Property Tags](#property-tags)
- [Conditional Tags](#conditional-tags)
- [Tag Formatters](#tag-formatters)
- [Text Formatters](#text-formatters)
- [Series Formatters](#series-formatters)
- [Series List Formatters](#series-list-formatters)
- [Name Formatters](#name-formatters)
- [Name List Formatters](#name-list-formatters)
- [Number Formatters](#number-formatters)
- [Date Formatters](#date-formatters)
# Template Tags
These are the naming template tags currently supported by Libation.
## Property Tags
These tags will be replaced in the template with the audiobook's values.
|Tag|Description|Type|
|-|-|-|
|\<id\> **†**|Audible book ID (ASIN)|Text|
|\<title\>|Full title with subtitle|[Text](#text-formatters)|
|\<title short\>|Title. Stop at first colon|[Text](#text-formatters)|
|\<audible title\>|Audible's title (does not include subtitle)|[Text](#text-formatters)|
|\<audible subtitle\>|Audible's subtitle|[Text](#text-formatters)|
|\<author\>|Author(s)|[Name List](#name-list-formatters)|
|\<first author\>|First author|[Name](#name-formatters)|
|\<narrator\>|Narrator(s)|[Name List](#name-list-formatters)|
|\<first narrator\>|First narrator|[Name](#name-formatters)|
|\<series\>|All series to which the book belongs (if any)|[Series List](#series-list-formatters)|
|\<first series\>|First series|[Series](#series-formatters)|
|\<series#\>|Number order in series (alias for \<first series[{#}]\>)|[Number](#number-formatters)|
|\<bitrate\>|Bitrate (kbps) of the last downloaded audiobook|[Number](#number-formatters)|
|\<samplerate\>|Sample rate (Hz) of the last downloaded audiobook|[Number](#number-formatters)|
|\<channels\>|Number of audio channels in the last downloaded audiobook|[Number](#number-formatters)|
|\<codec\>|Audio codec of the last downloaded audiobook|[Text](#text-formatters)|
|\<file version\>|Audible's file version number of the last downloaded audiobook|[Text](#text-formatters)|
|\<libation version\>|Libation version used during last download of the audiobook|[Text](#text-formatters)|
|\<account\>|Audible account of this book|[Text](#text-formatters)|
|\<account nickname\>|Audible account nickname of this book|[Text](#text-formatters)|
|\<locale\>|Region/country|[Text](#text-formatters)|
|\<year\>|Year published|[Number](#number-formatters)|
|\<language\>|Book's language|[Text](#text-formatters)|
|\<language short\> **†**|Book's language abbreviated. Eg: ENG|Text|
|\<file date\>|File creation date/time.|[DateTime](#date-formatters)|
|\<pub date\>|Audiobook publication date|[DateTime](#date-formatters)|
|\<date added\>|Date the book added to your Audible account|[DateTime](#date-formatters)|
|\<ch count\> **‡**|Number of chapters|[Number](#number-formatters)|
|\<ch title\> **‡**|Chapter title|[Text](#text-formatters)|
|\<ch#\> **‡**|Chapter number|[Number](#number-formatters)|
|\<ch# 0\> **‡**|Chapter number with leading zeros|[Number](#number-formatters)|
**†** Does not support custom formatting
**‡** Only valid for Chapter Filename and Chapter Title Metadata
To change how these properties are displayed, [read about custom formatters](#tag-formatters)
## Conditional Tags
Anything between the opening tag (`<tagname->`) and closing tag (`<-tagname>`) will only appear in the name if the condition evaluates to true.
|Tag|Description|Type|
|-|-|-|
|\<if series-\>...\<-if series\>|Only include if part of a book series or podcast|Conditional|
|\<if podcast-\>...\<-if podcast\>|Only include if part of a podcast|Conditional|
|\<if bookseries-\>...\<-if bookseries\>|Only include if part of a book series|Conditional|
|\<if podcastparent-\>...\<-if podcastparent\>**†**|Only include if item is a podcast series parent|Conditional|
|\<has PROPERTY-\>...\<-has\>|Only include if the PROPERTY has a value (i.e. not null or empty)|Conditional|
**†** Only affects the podcast series folder naming if "Save all podcast episodes to the series parent folder" option is checked.
For example, `<if podcast-><series><-if podcast>` will evaluate to the podcast's series name if the file is a podcast. For audiobooks that are not podcasts, that tag will be blank.
You can invert the condition (instead of displaying the text when the condition is true, display the text when it is false) by placing a `!` symbol before the opening tag name.
|Inverted Tag|Description|Type|
|-|-|-|
|\<!if series-\>...\<-if series\>|Only include if *not* part of a book series or podcast|Conditional|
|\<!if podcast-\>...\<-if podcast\>|Only include if *not* part of a podcast|Conditional|
|\<!if bookseries-\>...\<-if bookseries\>|Only include if *not* part of a book series|Conditional|
|\<!if podcastparent-\>...\<-if podcastparent\>**†**|Only include if item is *not* a podcast series parent|Conditional|
|\<!has PROPERTY-\>...\<-has\>|Only include if the PROPERTY *does not* have a value (i.e. is null or empty)|Conditional|
**†** Only affects the podcast series folder naming if "Save all podcast episodes to the series parent folder" option is checked.
As an example, this folder template will place all Liberated podcasts into a "Podcasts" folder and all liberated books (not podcasts) into a "Books" folder.
`<if podcast->Podcasts<-if podcast><!if podcast->Books<-if podcast>\<title>`
This example will add a number if the `<series#>` tag has a value:
`<has series#-><series#><-has>`
This example will put non-series books in a "Standalones" folder:
`<!if series->Standalones/<-if series>`
And this example will customize the title based on whether the book has a subtitle:
`<audible title><has audible subtitle->-<audible subtitle><-has>`
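Putting several of these together, a folder template like `<first author[{L}, {F}]>\<if series-><first series> <series#[00]> - <-if series><title short>` (a sketch, not a built-in preset) could yield `Doyle, Arthur\Sherlock Holmes 01 - A Study in Scarlet` for a series book and `Doyle, Arthur\A Study in Scarlet` for a standalone title; the exact result depends on your library's metadata.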
# Tag Formatters
**Text**, **Name List**, **Number**, and **DateTime** tags can be optionally formatted using format text in square brackets after the tag name. Below is a list of supported formatters for each tag type.
## Text Formatters
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|L|Converts text to lowercase|\<title[L]\>|a study in scarlet a sherlock holmes novel|
|U|Converts text to uppercase|\<title short[U]\>|A STUDY IN SCARLET|
## Series Formatters
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|\{N \| # \| ID\}|Formats the series using<br>the series part tags.<br>\{N\} = Series Name<br>\{#\} = Number order in series<br>\{#:[Number_Formatter](#number-formatters)\} = Number order in series, formatted<br>\{ID\} = Audible Series ID<br><br>Default is \{N\}|`<first series>`<hr>`<first series[{N}]>`<hr>`<first series[{N}, {#}, {ID}]>`<hr>`<first series[{N}, {ID}, {#:00.0}]>`|Sherlock Holmes<hr>Sherlock Holmes<hr>Sherlock Holmes, 1-6, B08376S3R2<hr>Sherlock Holmes, B08376S3R2, 01.0-06.0|
## Series List Formatters
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|separator()|Specify the text used to join<br>multiple series names.<br><br>Default is ", "|`<series[separator(; )]>`|Sherlock Holmes; Some Other Series|
|format(\{N \| # \| ID\})|Formats the series properties<br>using the series part tags.<br>See [Series Formatter Usage](#series-formatters) above.|`<series[format({N}, {#})`<br>`separator(; )]>`<hr>`<series[format({ID}-{N}, {#:00.0})]>`|Sherlock Holmes, 1-6; Book Collection, 1<hr>B08376S3R2-Sherlock Holmes, 01.0-06.0, B000000000-Book Collection, 01.0|
|max(#)|Only use the first # of series<br><br>Default is all series|`<series[max(1)]>`|Sherlock Holmes|
## Name Formatters
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|\{T \| F \| M \| L \| S \| ID\}|Formats the human name using<br>the name part tags.<br>\{T\} = Title (e.g. "Dr.")<br>\{F\} = First name<br>\{M\} = Middle name<br>\{L\} = Last Name<br>\{S\} = Suffix (e.g. "PhD")<br>\{ID\} = Audible Contributor ID<br><br>Default is \{T\} \{F\} \{M\} \{L\} \{S\}|`<first narrator[{L}, {F}]>`<hr>`<first author[{L}, {F} _{ID}_]>`|Fry, Stephen<hr>Doyle, Arthur \_B000AQ43GQ\_|
## Name List Formatters
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|separator()|Specify the text used to join<br>multiple people's names.<br><br>Default is ", "|`<author[separator(; )]>`|Arthur Conan Doyle; Stephen Fry|
|format(\{T \| F \| M \| L \| S \| ID\})|Formats the human name using<br>the name part tags.<br>See [Name Formatter Usage](#name-formatters) above.|`<author[format({L}, {F})`<br>`separator(; )]>`<hr>`<author[format({L}, {F}`<br>`_{ID}_) separator(; )]>`|Doyle, Arthur; Fry, Stephen<hr>Doyle, Arthur \_B000AQ43GQ\_;<br>Fry, Stephen \_B000APAGVS\_|
|sort(F \| M \| L)|Sorts the names by first, middle,<br>or last name<br><br>Default is unsorted|`<author[sort(M)]>`|Stephen Fry, Arthur Conan Doyle|
|max(#)|Only use the first # of names<br><br>Default is all names|`<author[max(1)]>`|Arthur Conan Doyle|
## Number Formatters
For more custom formatters and examples, [see this guide from Microsoft](https://learn.microsoft.com/en-us/dotnet/standard/base-types/custom-numeric-format-strings).
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|\[integer\]|Zero-pads the number|\<bitrate\[4\]\><br>\<series#\[3\]\><br>\<samplerate\[6\]\>|0128<br>001<br>044100|
|0|Replaces the zero with the corresponding digit if one<br>is present; otherwise, zero appears in the result string.|\<series#\[000.0\]\>|001.0|
|#|Replaces the "#" symbol with the corresponding digit if one<br> is present; otherwise, no digit appears in the result string|\<series#\[00.##\]\>|01|
## Date Formatters
For more standard formatters, [see this guide from Microsoft](https://learn.microsoft.com/en-us/dotnet/standard/base-types/standard-date-and-time-format-strings).
### Standard DateTime Formatters
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|s|Sortable date/time pattern.|\<file date[s]\>|2023-02-14T13:45:30|
|Y|Year month pattern.|\<file date[Y]\>|February 2023|
### Custom DateTime Formatters
You can use custom formatters to construct customized DateTime string. For more custom formatters and examples, [see this guide from Microsoft](https://learn.microsoft.com/en-us/dotnet/standard/base-types/custom-date-and-time-format-strings).
|Formatter|Description|Example Usage|Example Result|
|-|-|-|-|
|yyyy|4-digit year|\<file date[yyyy]\>|2023|
|yy|2-digit year|\<file date[yy]\>|23|
|MM|2-digit month|\<file date[MM]\>|02|
|dd|2-digit day of the month|\<file date[yyyy-MM-dd]\>|2023-02-14|
|HH<br>mm|The hour, using a 24-hour clock from 00 to 23<br>The minute, from 00 through 59.|\<file date[HH:mm]\>|14:45|

View File

@ -1,6 +1,6 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest) ## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us) ### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**. ...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.

Binary file not shown.


Binary file not shown.


Binary file not shown.

View File

@ -1,32 +0,0 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 512 512" width="512px" enable-background="new 0 0 512 512">
<path id="slosh" transform=
"translate(-50 23)
scale(0.7, 0.7)
rotate(12 256,256)"
d=
"M139,2
A 192,200 0 0 0 103,84
A 222,334 41 0 0 241,320
V478
H160
A 16,16 0 0 0 160,510
H352
A16 16 0 0 0 352,478
H271
V320
A 222,334 -41 0 0 409,84
A 192,200 0 0 0 373,2
M355,32
A 192,200 0 0 1 381,127
A 187.5,334 -35 0 1 256,286
A 187.5,334 35 0 1 131,127
A 192,200 0 0 1 157,32
H355
M146,147
A 168,300 35 0 0 256,270
A 168,300 -35 0 0 366,128
S 360,50 280,110
S 192,128 147,147
z" />
<use href="#slosh" transform="translate(512 0) scale(-1 1)" />
</svg>


View File

@ -1,41 +0,0 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 288 288" enable-background="new 0 0 288 288">
<defs>
<g id="glass">
<path transform="translate(16 16)" fill-rule="evenodd" d=
"M177,16
H79
A 32.0781 63.7932 -1.5106 0 0 66 80
A 158.789 471.1259 41.9466 0 0 90 131
A 81.7197 122.0515 35.3745 0 0 128 143.3484
A 81.7197 122.0515 -35.3745 0 0 166 131
A 158.789 471.1259 -41.9466 0 0 190 80
A 32.0781 63.7932 1.5106 0 0 177 16
L 184 0
A 44.7901 78.5247 1.1521 0 1 194 122
A 97.0039 135.3148 -36.2124 0 1 136 159
V 240
H 176
A 8 8 0 0 1 176 256
H 80
A 8 8 0 0 1 80 240
H 120
V 159
A 97.0039 135.3148 36.2124 0 1 62 122
A 44.7901 78.5247 -1.1521 0 1 72 0
H184
z"/>
</g>
<g transform="translate(16 16)" id="wine-level">
<path d=
"M182,64
H 74
A 115.9979 308.8033 38.9474 0 0 128 134.4277
A 115.9979 308.8033 -38.9474 0 0 182,64
z"/>
</g>
</defs>
<use href="#glass" stroke="#ffffffa0" stroke-width="16" fill="Transparent" />
<use href="#wine-level" stroke="#ffffffa0" stroke-width="16" fill="Transparent" />
<use href="#glass" fill="Black" />
<use href="#wine-level" fill="Black" />
</svg>


View File

@ -1,31 +0,0 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 288 288" enable-background="new 0 0 288 288">
<g>
<path transform="rotate(90 128,128) translate(60 -16)" fill-rule="evenodd" d=
"M177,16
H79
A 32.0781 63.7932 -1.5106 0 0 66 80
A 158.789 471.1259 41.9466 0 0 90 131
A 81.7197 122.0515 35.3745 0 0 128 143.3484
A 81.7197 122.0515 -35.3745 0 0 166 131
A 158.789 471.1259 -41.9466 0 0 190 80
A 32.0781 63.7932 1.5106 0 0 177 16
L 184 0
A 44.7901 78.5247 1.1521 0 1 194 122
A 97.0039 135.3148 -36.2124 0 1 136 159
V 240
H 176
A 8 8 0 0 1 176 256
H 80
A 8 8 0 0 1 80 240
H 120
V 159
A 97.0039 135.3148 36.2124 0 1 62 122
A 44.7901 78.5247 -1.1521 0 1 72 0
H184
M170,115
V24
A 19.5181 45.9183 -3.3549 0 1 182.4322 69.5
A 19.5181 45.9183 3.3549 0 1 170 115
z"/>
</g>
</svg>


View File

@ -1,33 +0,0 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 512 512" enable-background="new 0 0 512 512">
<path
transform=
"rotate(15 256,256)
translate(0 25)
scale(0.93, 0.93)"
d=
"M139,2
A 192,200 0 0 0 103,84
A 222,334 41 0 0 241,320
V478
H160
A 16,16 0 0 0 160,510
H352
A16 16 0 0 0 352,478
H271
V320
A 222,334 -41 0 0 409,84
A 192,200 0 0 0 373,2
M355,32
A 192,200 0 0 1 381,127
A 187.5,334 -35 0 1 256,286
A 187.5,334 35 0 1 131,127
A 192,200 0 0 1 157,32
H355
M146,147
A 168,300 35 0 0 256,270
A 168,300 -35 0 0 366,128
S 360,50 280,110
S 192,128 147,147
z" />
</svg>


View File

@ -2,11 +2,9 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest) ## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us) ### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**. ...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.
# Table of Contents # Table of Contents
- [Audible audiobook manager](#audible-audiobook-manager) - [Audible audiobook manager](#audible-audiobook-manager)
@ -22,7 +20,6 @@
- [Download PDF attachments](Documentation/GettingStarted.md#download-pdf-attachments) - [Download PDF attachments](Documentation/GettingStarted.md#download-pdf-attachments)
- [Details of downloaded files](Documentation/GettingStarted.md#details-of-downloaded-files) - [Details of downloaded files](Documentation/GettingStarted.md#details-of-downloaded-files)
- [Export your library](Documentation/GettingStarted.md#export-your-library) - [Export your library](Documentation/GettingStarted.md#export-your-library)
- If you still need help, [you can open an issue here](https://github.com/rmcrackan/Libation/issues) for bug reports, feature requests, or specialized help.
- [Searching and filtering](Documentation/SearchingAndFiltering.md) - [Searching and filtering](Documentation/SearchingAndFiltering.md)
- [Tags](Documentation/SearchingAndFiltering.md#tags) - [Tags](Documentation/SearchingAndFiltering.md#tags)
- [Searches](Documentation/SearchingAndFiltering.md#searches) - [Searches](Documentation/SearchingAndFiltering.md#searches)
@ -31,12 +28,8 @@
- [Advanced](Documentation/Advanced.md) - [Advanced](Documentation/Advanced.md)
- [Files and folders](Documentation/Advanced.md#files-and-folders) - [Files and folders](Documentation/Advanced.md#files-and-folders)
- [Settings](Documentation/Advanced.md#settings) - [Settings](Documentation/Advanced.md#settings)
- [Custom File Naming](Documentation/NamingTemplates.md) - [Custom File Naming](Documentation/Advanced.md#custom-file-naming)
- [Command Line Interface](Documentation/Advanced.md#command-line-interface) - [Command Line Interface](Documentation/Advanced.md#command-line-interface)
- [Custom Theme Colors](Documentation/Advanced.md#custom-theme-colors) (Chardonnay Only)
- [Audio Formats (Dolby Atmos, Widevine, Spacial Audio)](Documentation/AudioFileFormats.md)
- [Docker](Documentation/Docker.md)
- [Frequently Asked Questions](Documentation/FrequentlyAskedQuestions.md)
## Getting started ## Getting started
@ -55,12 +48,12 @@
* Customizable saved filters for common searches * Customizable saved filters for common searches
* Open source * Open source
* Supports most regions: US, UK, Canada, Germany, France, Australia, Japan, India, and Spain * Supports most regions: US, UK, Canada, Germany, France, Australia, Japan, India, and Spain
* Fully supported in Windows, Mac, and Linux
<a name="theBad"/> <a name="theBad"/>
### The bad ### The bad
* Only fully supported in Windows. (Mac and Linux are in beta)
* Large file size * Large file size
* Made by a programmer, not a designer so the goals are function rather than beauty. And it shows * Made by a programmer, not a designer so the goals are function rather than beauty. And it shows

View File

@ -1,137 +0,0 @@
#!/bin/bash
BIN_DIR=$1; shift
VERSION=$1; shift
ARCH=$1; shift
if [ -z "$BIN_DIR" ]
then
echo "This script must be called with a the Libation Linux bins directory as an argument."
exit
fi
if [ ! -d "$BIN_DIR" ]
then
echo "The directory \"$BIN_DIR\" does not exist."
exit
fi
if [ -z "$VERSION" ]
then
echo "This script must be called with the Libation version number as an argument."
exit
fi
if [ -z "$ARCH" ]
then
echo "This script must be called with the Libation cpu architecture as an argument."
exit
fi
contains() { case "$1" in *"$2"*) true ;; *) false ;; esac }
if ! contains "$BIN_DIR" "$ARCH"
then
echo "This script must be called with a Libation binaries for ${ARCH}."
exit
fi
ARCH=$(echo $ARCH | sed 's/x64/amd64/')
DEB_DIR=./deb
FOLDER_EXEC=$DEB_DIR/usr/lib/libation
echo "Exec dir: $FOLDER_EXEC"
mkdir -p $FOLDER_EXEC
echo "Moving bins from $BIN_DIR to $FOLDER_EXEC"
mv "${BIN_DIR}/"* $FOLDER_EXEC
if [ $? -ne 0 ]
then echo "Error moving ${BIN_DIR} files"
exit
fi
delfiles=('LinuxConfigApp' 'LinuxConfigApp.deps.json' 'LinuxConfigApp.runtimeconfig.json')
for n in "${delfiles[@]}"
do
echo "Deleting $n"
rm $FOLDER_EXEC/$n
done
FOLDER_ICON=$DEB_DIR/usr/share/icons/hicolor/scalable/apps/
echo "Icon dir: $FOLDER_ICON"
FOLDER_DESKTOP=$DEB_DIR/usr/share/applications
echo "Desktop dir: $FOLDER_DESKTOP"
FOLDER_DEBIAN=$DEB_DIR/DEBIAN
echo "Debian dir: $FOLDER_DEBIAN"
mkdir -p $FOLDER_ICON
mkdir -p $FOLDER_DESKTOP
mkdir -p $FOLDER_DEBIAN
echo "Copying icon..."
cp $FOLDER_EXEC/libation_glass.svg $FOLDER_ICON/libation.svg
echo "Copying desktop file..."
cp $FOLDER_EXEC/Libation.desktop $FOLDER_DESKTOP/Libation.desktop
echo "Creating pre-install file..."
echo "#!/bin/bash
# Pre-install script, removes previous installation program files and sym links
echo \"Removing previously created symlinks...\"
rm /usr/bin/libation
rm /usr/bin/hangover
rm /usr/bin/libationcli
echo \"Removing previously installed Libation files...\"
rm -r /usr/lib/libation
# making sure it won't stop installation
exit 0
" >> $FOLDER_DEBIAN/preinst
echo "Creating post-install file..."
echo "#!/bin/bash
gtk-update-icon-cache -f /usr/share/icons/hicolor/
ln -s /usr/lib/libation/Libation /usr/bin/libation
ln -s /usr/lib/libation/Hangover /usr/bin/hangover
ln -s /usr/lib/libation/LibationCli /usr/bin/libationcli
# Increase the maximum number of inotify instances
if ! grep -q 'fs.inotify.max_user_instances=524288' /etc/sysctl.conf; then
echo fs.inotify.max_user_instances=524288 | tee -a /etc/sysctl.conf && sysctl -p
fi
" >> $FOLDER_DEBIAN/postinst
echo "Creating control file..."
echo "Package: Libation
Version: $VERSION
Architecture: $ARCH
Essential: no
Priority: optional
Maintainer: github.com/rmcrackan
Description: liberate your audiobooks
" >> $FOLDER_DEBIAN/control
echo "Changing permissions for pre- and post-install files..."
chmod +x "$FOLDER_DEBIAN/preinst"
chmod +x "$FOLDER_DEBIAN/postinst"
if [ "$(uname -s)" == "Darwin" ]; then
echo "macOS detected, installing dpkg"
brew install dpkg
fi
DEB_FILE=Libation.${VERSION}-linux-chardonnay-${ARCH}.deb
echo "Creating $DEB_FILE"
dpkg-deb -Zxz --build $DEB_DIR ./$DEB_FILE
echo "moving to ./bundle/$DEB_FILE"
mkdir bundle
mv $DEB_FILE ./bundle/$DEB_FILE
rm -r "$BIN_DIR"
echo "Done!"

View File

@ -1,107 +0,0 @@
#!/bin/bash
BIN_DIR=$1; shift
VERSION=$1; shift
ARCH=$1; shift
if [ -z "$BIN_DIR" ]
then
echo "This script must be called with a the Libation macos bins directory as an argument."
exit
fi
if [ ! -d "$BIN_DIR" ]
then
echo "The directory \"$BIN_DIR\" does not exist."
exit
fi
if [ -z $VERSION ]
then
echo "This script must be called with the Libation version number as an argument."
exit
fi
if [ -z $ARCH ]
then
echo "This script must be called with the Libation cpu architecture as an argument."
exit
fi
contains() { case "$1" in *"$2"*) true ;; *) false ;; esac }
if ! contains "$BIN_DIR" $ARCH
then
echo "This script must be called with a Libation binaries for ${ARCH}."
exit
fi
BUNDLE=./Libation.app
echo "Bundle dir: $BUNDLE"
if [[ -d $BUNDLE ]]
then
echo "$BUNDLE directory already exists, aborting."
exit
fi
BUNDLE_CONTENTS=$BUNDLE/Contents
echo "Bundle Contents dir: $BUNDLE_CONTENTS"
BUNDLE_RESOURCES=$BUNDLE_CONTENTS/Resources
echo "Resources dir: $BUNDLE_RESOURCES"
BUNDLE_MACOS=$BUNDLE_CONTENTS/MacOS
echo "MacOS dir: $BUNDLE_MACOS"
mkdir -p $BUNDLE_CONTENTS
mkdir -p $BUNDLE_RESOURCES
mkdir -p $BUNDLE_MACOS
mv "${BIN_DIR}/"* $BUNDLE_MACOS
if [ $? -ne 0 ]
then echo "Error moving ${BIN_DIR} files"
exit
fi
echo "Make fileicon executable..."
chmod +x $BUNDLE_MACOS/fileicon
echo "Moving icon..."
mv $BUNDLE_MACOS/libation.icns $BUNDLE_RESOURCES/libation.icns
echo "Moving Info.plist file..."
mv $BUNDLE_MACOS/Info.plist $BUNDLE_CONTENTS/Info.plist
PLIST_ARCH=$(echo $ARCH | sed 's/x64/x86_64/')
echo "Set LSArchitecturePriority to $PLIST_ARCH"
sed -i -e "s/ARCHITECTURE_STRING/$PLIST_ARCH/" $BUNDLE_CONTENTS/Info.plist
echo "Set CFBundleVersion to $VERSION"
sed -i -e "s/VERSION_STRING/$VERSION/" $BUNDLE_CONTENTS/Info.plist
delfiles=('MacOSConfigApp' 'MacOSConfigApp.deps.json' 'MacOSConfigApp.runtimeconfig.json')
for n in "${delfiles[@]}"
do
echo "Deleting $n"
rm $BUNDLE_MACOS/$n
done
APP_FILE=Libation.${VERSION}-macOS-chardonnay-${ARCH}.tgz
echo "Signing executables in: $BUNDLE"
codesign --force --deep -s - $BUNDLE
echo "Creating app bundle: $APP_FILE"
tar -czvf $APP_FILE $BUNDLE
mkdir bundle
echo "moving to ./bundle/$APP_FILE"
mv $APP_FILE ./bundle/$APP_FILE
rm -r $BUNDLE
echo "Done!"

View File

@ -1,141 +0,0 @@
#!/bin/bash
BIN_DIR=$1; shift
VERSION=$1; shift
ARCH=$1; shift
if [ -z "$BIN_DIR" ]
then
echo "This script must be called with a the Libation Linux bins directory as an argument."
exit
fi
if [ ! -d "$BIN_DIR" ]
then
echo "The directory \"$BIN_DIR\" does not exist."
exit
fi
if [ -z "$VERSION" ]
then
echo "This script must be called with the Libation version number as an argument."
exit
fi
if [ -z "$ARCH" ]
then
echo "This script must be called with the Libation cpu architecture as an argument."
exit
fi
contains() { case "$1" in *"$2"*) true ;; *) false ;; esac }
if ! contains "$BIN_DIR" "$ARCH"
then
echo "This script must be called with a Libation binaries for ${ARCH}."
exit
fi
BASEDIR=$(pwd)
delfiles=('LinuxConfigApp' 'LinuxConfigApp.deps.json' 'LinuxConfigApp.runtimeconfig.json')
if [[ "$ARCH" == "x64" ]]
then
ARCH_RPM="x86_64"
ARCH="amd64"
else
ARCH_RPM="aarch64"
fi
notinstalled=('libcoreclrtraceptprovider.so' 'libation_glass.svg' 'Libation.desktop')
mkdir -p ~/rpmbuild/SPECS
mkdir ~/rpmbuild/BUILD
mkdir ~/rpmbuild/RPMS
echo "Name: libation
Version: ${VERSION}
Release: 1
Summary: Liberate your Audible Library
License: GPLv3+
URL: https://github.com/rmcrackan/Libation
Source0: https://github.com/rmcrackan/Libation
Requires: bash
%define __os_install_post %{nil}
%description
Liberate your Audible Library
%install
mkdir -p %{buildroot}%{_libdir}/%{name}
mkdir -p %{buildroot}%{_datadir}/icons/hicolor/scalable/apps
mkdir -p %{buildroot}%{_datadir}/applications
if test -f 'libcoreclrtraceptprovider.so'; then
rm 'libcoreclrtraceptprovider.so'
fi
install -m 666 libation_glass.svg %{buildroot}%{_datadir}/icons/hicolor/scalable/apps/libation.svg
install -m 666 Libation.desktop %{buildroot}%{_datadir}/applications/Libation.desktop
rm libation_glass.svg
rm Libation.desktop
install * %{buildroot}%{_libdir}/%{name}/
%post
if [ \$1 -eq 1 ] ; then
# Initial installation
ln -s %{_libdir}/%{name}/Libation %{_bindir}/libation
ln -s %{_libdir}/%{name}/Hangover %{_bindir}/hangover
ln -s %{_libdir}/%{name}/LibationCli %{_bindir}/libationcli
gtk-update-icon-cache -f %{_datadir}/icons/hicolor/
if ! grep -q 'fs.inotify.max_user_instances=524288' /etc/sysctl.conf; then
echo fs.inotify.max_user_instances=524288 | tee -a /etc/sysctl.conf && sysctl -p
fi
fi
%postun
if [ \$1 -eq 0 ] ; then
# Uninstall
rm %{_bindir}/libation
rm %{_bindir}/hangover
rm %{_bindir}/libationcli
fi
%files
%{_datadir}/icons/hicolor/scalable/apps/libation.svg
%{_datadir}/applications/Libation.desktop" >> ~/rpmbuild/SPECS/libation.spec
cd "$BIN_DIR"
for f in *; do
if [[ " ${delfiles[*]} " =~ " ${f} " ]]; then
echo "Deleting $f"
elif [[ ! " ${notinstalled[*]} " =~ " ${f} " ]]; then
echo "%{_libdir}/%{name}/${f}" >> ~/rpmbuild/SPECS/libation.spec
cp $f ~/rpmbuild/BUILD/
else
cp $f ~/rpmbuild/BUILD/
fi
done
cd ~/rpmbuild/SPECS/
rpmbuild -bb --target $ARCH_RPM libation.spec
cd $BASEDIR
RPM_FILE=$(ls ~/rpmbuild/RPMS/${ARCH_RPM})
mkdir bundle
mv ~/rpmbuild/RPMS/${ARCH_RPM}/$RPM_FILE "./bundle/Libation.${VERSION}-linux-chardonnay-${ARCH}.rpm"

View File

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net9.0</TargetFramework> <TargetFramework>net6.0</TargetFramework>
</PropertyGroup> </PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'"> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
@ -13,7 +13,7 @@
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="AAXClean.Codecs" Version="2.0.2.2" /> <PackageReference Include="AAXClean.Codecs" Version="0.2.14" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

View File

@ -1,21 +1,18 @@
using AAXClean; using System;
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using AAXClean;
using Dinah.Core.Net.Http;
#nullable enable
namespace AaxDecrypter namespace AaxDecrypter
{ {
public abstract class AaxcDownloadConvertBase : AudiobookDownloadBase public abstract class AaxcDownloadConvertBase : AudiobookDownloadBase
{ {
public event EventHandler<AppleTags>? RetrievedMetadata; public event EventHandler<AppleTags> RetrievedMetadata;
public Mp4File? AaxFile { get; private set; } protected AaxFile AaxFile;
protected Mp4Operation? AaxConversion { get; set; }
protected AaxcDownloadConvertBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions) protected AaxcDownloadConvertBase(string outFileName, string cacheDirectory, IDownloadOptions dlOptions)
: base(outDirectory, cacheDirectory, dlOptions) { } : base(outFileName, cacheDirectory, dlOptions) { }
/// <summary>Setting cover art by this method will insert the art into the audiobook metadata</summary> /// <summary>Setting cover art by this method will insert the art into the audiobook metadata</summary>
public override void SetCoverArt(byte[] coverArt) public override void SetCoverArt(byte[] coverArt)
@ -25,67 +22,9 @@ namespace AaxDecrypter
AaxFile.AppleTags.Cover = coverArt; AaxFile.AppleTags.Cover = coverArt;
} }
public override async Task CancelAsync()
{
await base.CancelAsync();
await (AaxConversion?.CancelAsync() ?? Task.CompletedTask);
}
private Mp4File Open()
{
if (DownloadOptions.DecryptionKeys is not KeyData[] keys || keys.Length == 0)
throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} cannot be null or empty for a '{DownloadOptions.InputType}' file.");
else if (DownloadOptions.InputType is FileType.Dash)
{
//We may have multiple keys , so use the key whose key ID matches
//the dash files default Key ID.
var keyIds = keys.Select(k => new Guid(k.KeyPart1, bigEndian: true)).ToArray();
var dash = new DashFile(InputFileStream);
var kidIndex = Array.IndexOf(keyIds, dash.Tenc.DefaultKID);
if (kidIndex == -1)
throw new InvalidOperationException($"None of the {keyIds.Length} key IDs match the dash file's default KeyID of {dash.Tenc.DefaultKID}");
keys[0] = keys[kidIndex];
var keyId = keys[kidIndex].KeyPart1;
var key = keys[kidIndex].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null decryption key (KeyPart2).");
dash.SetDecryptionKey(keyId, key);
WriteKeyFile($"KeyId={Convert.ToHexString(keyId)}{Environment.NewLine}Key={Convert.ToHexString(key)}");
return dash;
}
else if (DownloadOptions.InputType is FileType.Aax)
{
var aax = new AaxFile(InputFileStream);
var key = keys[0].KeyPart1;
aax.SetDecryptionKey(keys[0].KeyPart1);
WriteKeyFile($"ActivationBytes={Convert.ToHexString(key)}");
return aax;
}
else if (DownloadOptions.InputType is FileType.Aaxc)
{
var aax = new AaxFile(InputFileStream);
var key = keys[0].KeyPart1;
var iv = keys[0].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null initialization vector (KeyPart2).");
aax.SetDecryptionKey(keys[0].KeyPart1, iv);
WriteKeyFile($"Key={Convert.ToHexString(key)}{Environment.NewLine}IV={Convert.ToHexString(iv)}");
return aax;
}
else throw new InvalidOperationException($"{nameof(DownloadOptions.InputType)} of '{DownloadOptions.InputType}' is unknown.");
void WriteKeyFile(string contents)
{
var keyFile = Path.Combine(Path.ChangeExtension(InputFileStream.SaveFilePath, ".key"));
File.WriteAllText(keyFile, contents + Environment.NewLine);
OnTempFileCreated(new(keyFile));
}
}
protected bool Step_GetMetadata() protected bool Step_GetMetadata()
{ {
AaxFile = Open(); AaxFile = new AaxFile(InputFileStream);
RetrievedMetadata?.Invoke(this, AaxFile.AppleTags);
if (DownloadOptions.StripUnabridged) if (DownloadOptions.StripUnabridged)
{ {
@ -93,52 +32,76 @@ namespace AaxDecrypter
AaxFile.AppleTags.Album = AaxFile.AppleTags.Album?.Replace(" (Unabridged)", ""); AaxFile.AppleTags.Album = AaxFile.AppleTags.Album?.Replace(" (Unabridged)", "");
} }
if (DownloadOptions.FixupFile) //Finishing configuring lame encoder.
{ if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
if (!string.IsNullOrWhiteSpace(AaxFile.AppleTags.Narrator)) MpegUtil.ConfigureLameOptions(
AaxFile.AppleTags.AppleListBox.EditOrAddTag("©wrt", AaxFile.AppleTags.Narrator); AaxFile,
DownloadOptions.LameConfig,
DownloadOptions.Downsample,
DownloadOptions.MatchSourceBitrate);
if (!string.IsNullOrWhiteSpace(AaxFile.AppleTags.Copyright))
AaxFile.AppleTags.Copyright = AaxFile.AppleTags.Copyright.Replace("(P)", "℗").Replace("&#169;", "©");
//Add audiobook shelf tags
//https://github.com/advplyr/audiobookshelf/issues/1794#issuecomment-1565050213
const string tagDomain = "com.pilabor.tone";
AaxFile.AppleTags.Title = DownloadOptions.Title;
if (DownloadOptions.Subtitle is string subtitle)
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "SUBTITLE", subtitle);
if (DownloadOptions.Publisher is string publisher)
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "PUBLISHER", publisher);
if (DownloadOptions.Language is string language)
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "LANGUAGE", language);
if (DownloadOptions.AudibleProductId is string asin)
{
AaxFile.AppleTags.Asin = asin;
AaxFile.AppleTags.AppleListBox.EditOrAddTag("asin", asin);
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "AUDIBLE_ASIN", asin);
}
if (DownloadOptions.SeriesName is string series)
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "SERIES", series);
if (DownloadOptions.SeriesNumber is string part)
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "PART", part);
}
OnRetrievedTitle(AaxFile.AppleTags.TitleSansUnabridged); OnRetrievedTitle(AaxFile.AppleTags.TitleSansUnabridged);
OnRetrievedAuthors(AaxFile.AppleTags.FirstAuthor); OnRetrievedAuthors(AaxFile.AppleTags.FirstAuthor ?? "[unknown]");
OnRetrievedNarrators(AaxFile.AppleTags.Narrator); OnRetrievedNarrators(AaxFile.AppleTags.Narrator ?? "[unknown]");
OnRetrievedCoverArt(AaxFile.AppleTags.Cover); OnRetrievedCoverArt(AaxFile.AppleTags.Cover);
OnInitialized();
RetrievedMetadata?.Invoke(this, AaxFile.AppleTags);
return !IsCanceled; return !IsCanceled;
} }
protected virtual void OnInitialized() { } protected DownloadProgress Step_DownloadAudiobook_Start()
{
var zeroProgress = new DownloadProgress
{
BytesReceived = 0,
ProgressPercentage = 0,
TotalBytesToReceive = InputFileStream.Length
};
OnDecryptProgressUpdate(zeroProgress);
AaxFile.SetDecryptionKey(DownloadOptions.AudibleKey, DownloadOptions.AudibleIV);
return zeroProgress;
}
protected void Step_DownloadAudiobook_End(DownloadProgress zeroProgress)
{
AaxFile.Close();
CloseInputFileStream();
OnDecryptProgressUpdate(zeroProgress);
}
protected void AaxFile_ConversionProgressUpdate(object sender, ConversionProgressEventArgs e)
{
var duration = AaxFile.Duration;
var remainingSecsToProcess = (duration - e.ProcessPosition).TotalSeconds;
var estTimeRemaining = remainingSecsToProcess / e.ProcessSpeed;
if (double.IsNormal(estTimeRemaining))
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
var progressPercent = (e.ProcessPosition / e.TotalDuration);
OnDecryptProgressUpdate(
new DownloadProgress
{
ProgressPercentage = 100 * progressPercent,
BytesReceived = (long)(InputFileStream.Length * progressPercent),
TotalBytesToReceive = InputFileStream.Length
});
}
public override async Task CancelAsync()
{
IsCanceled = true;
if (AaxFile != null)
await AaxFile.CancelAsync();
AaxFile?.Dispose();
CloseInputFileStream();
}
} }
} }

View File

@ -1,36 +1,66 @@
using AAXClean; using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using AAXClean;
using AAXClean.Codecs; using AAXClean.Codecs;
using FileManager; using FileManager;
using System;
using System.IO;
using System.Threading.Tasks;
#nullable enable
namespace AaxDecrypter namespace AaxDecrypter
{ {
public class AaxcDownloadMultiConverter : AaxcDownloadConvertBase public class AaxcDownloadMultiConverter : AaxcDownloadConvertBase
{ {
private static readonly TimeSpan minChapterLength = TimeSpan.FromSeconds(3); private static TimeSpan minChapterLength { get; } = TimeSpan.FromSeconds(3);
private FileStream? workingFileStream; private List<string> multiPartFilePaths { get; } = new List<string>();
public AaxcDownloadMultiConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions) public AaxcDownloadMultiConverter(string outFileName, string cacheDirectory, IDownloadOptions dlOptions)
: base(outDirectory, cacheDirectory, dlOptions) : base(outFileName, cacheDirectory, dlOptions) { }
public override async Task<bool> RunAsync()
{ {
AsyncSteps.Name = $"Download, Convert Aaxc To {DownloadOptions.OutputFormat}, and Split"; try
AsyncSteps["Step 1: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata); {
AsyncSteps["Step 2: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync; Serilog.Log.Information("Begin download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
//Step 1
Serilog.Log.Information("Begin Get Aaxc Metadata");
if (await Task.Run(Step_GetMetadata))
Serilog.Log.Information("Completed Get Aaxc Metadata");
else
{
Serilog.Log.Information("Failed to Complete Get Aaxc Metadata");
return false;
} }
protected override void OnInitialized() //Step 2
Serilog.Log.Information("Begin Download Decrypted Audiobook");
if (await Step_DownloadAudiobookAsMultipleFilesPerChapter())
Serilog.Log.Information("Completed Download Decrypted Audiobook");
else
{ {
//Finishing configuring lame encoder. Serilog.Log.Information("Failed to Complete Download Decrypted Audiobook");
if (DownloadOptions.OutputFormat == OutputFormat.Mp3) return false;
MpegUtil.ConfigureLameOptions( }
AaxFile,
DownloadOptions.LameConfig, //Step 3
DownloadOptions.Downsample, Serilog.Log.Information("Begin Cleanup");
DownloadOptions.MatchSourceBitrate, if (await Task.Run(Step_Cleanup))
chapters: null); Serilog.Log.Information("Completed Cleanup");
else
{
Serilog.Log.Information("Failed to Complete Cleanup");
return false;
}
Serilog.Log.Information("Completed download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
return true;
}
catch (Exception ex)
{
Serilog.Log.Error(ex, "Error encountered in download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
return false;
}
} }
/* /*
@ -57,9 +87,10 @@ The book will be split into the following files:
That naming may not be desirable for everyone, but it's an easy change to instead use the last of the combined chapter's title in the file name. That naming may not be desirable for everyone, but it's an easy change to instead use the last of the combined chapter's title in the file name.
*/ */
protected async override Task<bool> Step_DownloadAndDecryptAudiobookAsync() private async Task<bool> Step_DownloadAudiobookAsMultipleFilesPerChapter()
{ {
if (AaxFile is null) return false; var zeroProgress = Step_DownloadAudiobook_Start();
var chapters = DownloadOptions.ChapterInfo.Chapters; var chapters = DownloadOptions.ChapterInfo.Chapters;
// Ensure split files are at least minChapterLength in duration. // Ensure split files are at least minChapterLength in duration.
@ -82,76 +113,77 @@ That naming may not be desirable for everyone, but it's an easy change to instea
} }
} }
try // reset, just in case
{ multiPartFilePaths.Clear();
await (AaxConversion = decryptMultiAsync(AaxFile, splitChapters));
if (AaxConversion.IsCompletedSuccessfully) ConversionResult result;
await moveMoovToBeginning(AaxFile, workingFileStream?.Name);
return AaxConversion.IsCompletedSuccessfully; AaxFile.ConversionProgressUpdate += AaxFile_ConversionProgressUpdate;
} if (DownloadOptions.OutputFormat == OutputFormat.M4b)
finally result = await ConvertToMultiMp4a(splitChapters);
{ else
workingFileStream?.Dispose(); result = await ConvertToMultiMp3(splitChapters);
FinalizeDownload(); AaxFile.ConversionProgressUpdate -= AaxFile_ConversionProgressUpdate;
}
Step_DownloadAudiobook_End(zeroProgress);
return result == ConversionResult.NoErrorsDetected;
} }
private Mp4Operation decryptMultiAsync(Mp4File aaxFile, ChapterInfo splitChapters) private Task<ConversionResult> ConvertToMultiMp4a(ChapterInfo splitChapters)
{ {
var chapterCount = 0; var chapterCount = 0;
return return AaxFile.ConvertToMultiMp4aAsync
DownloadOptions.OutputFormat == OutputFormat.M4b
? aaxFile.ConvertToMultiMp4aAsync
( (
splitChapters, splitChapters,
newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback) newSplitCallback => Callback(++chapterCount, splitChapters, newSplitCallback),
) DownloadOptions.TrimOutputToChapterLength
: aaxFile.ConvertToMultiMp3Async
(
splitChapters,
newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback),
DownloadOptions.LameConfig
); );
}
void newSplit(int currentChapter, ChapterInfo splitChapters, INewSplitCallback newSplitCallback) private Task<ConversionResult> ConvertToMultiMp3(ChapterInfo splitChapters)
{
var chapterCount = 0;
return AaxFile.ConvertToMultiMp3Async
(
splitChapters,
newSplitCallback => Callback(++chapterCount, splitChapters, newSplitCallback),
DownloadOptions.LameConfig,
DownloadOptions.TrimOutputToChapterLength
);
}
private void Callback(int currentChapter, ChapterInfo splitChapters, NewMP3SplitCallback newSplitCallback)
=> Callback(currentChapter, splitChapters, newSplitCallback as NewSplitCallback);
private void Callback(int currentChapter, ChapterInfo splitChapters, NewSplitCallback newSplitCallback)
{ {
moveMoovToBeginning(aaxFile, workingFileStream?.Name).GetAwaiter().GetResult();
var newTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
MultiConvertFileProperties props = new() MultiConvertFileProperties props = new()
{ {
OutputFileName = newTempFile.FilePath, OutputFileName = OutputFileName,
PartsPosition = currentChapter, PartsPosition = currentChapter,
PartsTotal = splitChapters.Count, PartsTotal = splitChapters.Count,
Title = newSplitCallback.Chapter?.Title, Title = newSplitCallback?.Chapter?.Title,
}; };
newSplitCallback.OutputFile = createOutputFileStream(props);
newSplitCallback.OutputFile = workingFileStream = createOutputFileStream(props); newSplitCallback.TrackTitle = DownloadOptions.GetMultipartTitleName(props);
newSplitCallback.TrackTitle = DownloadOptions.GetMultipartTitle(props);
newSplitCallback.TrackNumber = currentChapter; newSplitCallback.TrackNumber = currentChapter;
newSplitCallback.TrackCount = splitChapters.Count; newSplitCallback.TrackCount = splitChapters.Count;
OnTempFileCreated(newTempFile with { PartProperties = props });
} }
FileStream createOutputFileStream(MultiConvertFileProperties multiConvertFileProperties) private FileStream createOutputFileStream(MultiConvertFileProperties multiConvertFileProperties)
{ {
FileUtility.SaferDelete(multiConvertFileProperties.OutputFileName); var fileName = DownloadOptions.GetMultipartFileName(multiConvertFileProperties);
return File.Open(multiConvertFileProperties.OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite); fileName = FileUtility.GetValidFilename(fileName, DownloadOptions.ReplacementCharacters);
}
}
private Mp4Operation moveMoovToBeginning(Mp4File aaxFile, string? filename) multiPartFilePaths.Add(fileName);
{
if (DownloadOptions.OutputFormat is OutputFormat.M4b FileUtility.SaferDelete(fileName);
&& DownloadOptions.MoveMoovToBeginning
&& filename is not null var file = File.Open(fileName, FileMode.OpenOrCreate);
&& File.Exists(filename)) OnFileCreated(fileName);
{ return file;
return Mp4File.RelocateMoovAsync(filename);
}
else return Mp4Operation.FromCompleted(aaxFile);
} }
} }
} }

View File

@ -1,108 +1,113 @@
using AAXClean; using System;
using AAXClean.Codecs;
using Dinah.Core.Net.Http;
using FileManager;
using System;
using System.IO; using System.IO;
using System.Threading.Tasks; using System.Threading.Tasks;
using AAXClean;
using AAXClean.Codecs;
using FileManager;
#nullable enable
namespace AaxDecrypter namespace AaxDecrypter
{ {
public class AaxcDownloadSingleConverter : AaxcDownloadConvertBase public class AaxcDownloadSingleConverter : AaxcDownloadConvertBase
{ {
private readonly AverageSpeed averageSpeed = new(); public AaxcDownloadSingleConverter(string outFileName, string cacheDirectory, IDownloadOptions dlOptions)
private TempFile? outputTempFile; : base(outFileName, cacheDirectory, dlOptions) { }
public AaxcDownloadSingleConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions) public override async Task<bool> RunAsync()
: base(outDirectory, cacheDirectory, dlOptions)
{ {
var step = 1;
AsyncSteps.Name = $"Download and Convert Aaxc To {DownloadOptions.OutputFormat}";
AsyncSteps[$"Step {step++}: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
AsyncSteps[$"Step {step++}: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
if (DownloadOptions.MoveMoovToBeginning && DownloadOptions.OutputFormat is OutputFormat.M4b)
AsyncSteps[$"Step {step++}: Move moov atom to beginning"] = Step_MoveMoov;
AsyncSteps[$"Step {step++}: Create Cue"] = Step_CreateCueAsync;
}
protected override void OnInitialized()
{
//Finishing configuring lame encoder.
if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
MpegUtil.ConfigureLameOptions(
AaxFile,
DownloadOptions.LameConfig,
DownloadOptions.Downsample,
DownloadOptions.MatchSourceBitrate,
DownloadOptions.ChapterInfo);
}
protected async override Task<bool> Step_DownloadAndDecryptAudiobookAsync()
{
if (AaxFile is null) return false;
outputTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
FileUtility.SaferDelete(outputTempFile.FilePath);
using var outputFile = File.Open(outputTempFile.FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
OnTempFileCreated(outputTempFile);
try
{
await (AaxConversion = decryptAsync(AaxFile, outputFile));
return AaxConversion.IsCompletedSuccessfully;
}
finally
{
FinalizeDownload();
}
}
private async Task<bool> Step_MoveMoov()
{
if (outputTempFile is null) return false;
AaxConversion = Mp4File.RelocateMoovAsync(outputTempFile.FilePath);
AaxConversion.ConversionProgressUpdate += AaxConversion_MoovProgressUpdate;
await AaxConversion;
AaxConversion.ConversionProgressUpdate -= AaxConversion_MoovProgressUpdate;
return AaxConversion.IsCompletedSuccessfully;
}
private void AaxConversion_MoovProgressUpdate(object? sender, ConversionProgressEventArgs e)
{
averageSpeed.AddPosition(e.ProcessPosition.TotalSeconds);
var remainingTimeToProcess = (e.EndTime - e.ProcessPosition).TotalSeconds;
var estTimeRemaining = remainingTimeToProcess / averageSpeed.Average;
if (double.IsNormal(estTimeRemaining))
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
OnDecryptProgressUpdate(
new DownloadProgress
{
ProgressPercentage = 100 * e.FractionCompleted,
BytesReceived = (long)(InputFileStream.Length * e.FractionCompleted),
TotalBytesToReceive = InputFileStream.Length
});
}
private Mp4Operation decryptAsync(Mp4File aaxFile, Stream outputFile)
=> DownloadOptions.OutputFormat == OutputFormat.Mp3
? aaxFile.ConvertToMp3Async
(
outputFile,
DownloadOptions.LameConfig,
DownloadOptions.ChapterInfo
)
: DownloadOptions.FixupFile
? aaxFile.ConvertToMp4aAsync
(
outputFile,
DownloadOptions.ChapterInfo
)
: aaxFile.ConvertToMp4aAsync(outputFile);
}
}

// ---- v8.5.1 version of the same class ----
public override async Task<bool> RunAsync()
{
try
{
Serilog.Log.Information("Begin download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
//Step 1
Serilog.Log.Information("Begin Step 1: Get Aaxc Metadata");
if (await Task.Run(Step_GetMetadata))
Serilog.Log.Information("Completed Step 1: Get Aaxc Metadata");
else
{
Serilog.Log.Information("Failed to Complete Step 1: Get Aaxc Metadata");
return false;
}
//Step 2
Serilog.Log.Information("Begin Step 2: Download Decrypted Audiobook");
if (await Step_DownloadAudiobookAsSingleFile())
Serilog.Log.Information("Completed Step 2: Download Decrypted Audiobook");
else
{
Serilog.Log.Information("Failed to Complete Step 2: Download Decrypted Audiobook");
return false;
}
//Step 3
Serilog.Log.Information("Begin Step 3: Create Cue");
if (await Task.Run(Step_CreateCue))
Serilog.Log.Information("Completed Step 3: Create Cue");
else
{
Serilog.Log.Information("Failed to Complete Step 3: Create Cue");
return false;
}
//Step 4
Serilog.Log.Information("Begin Step 4: Cleanup");
if (await Task.Run(Step_Cleanup))
Serilog.Log.Information("Completed Step 4: Cleanup");
else
{
Serilog.Log.Information("Failed to Complete Step 4: Cleanup");
return false;
}
Serilog.Log.Information("Completed download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
return true;
}
catch (Exception ex)
{
Serilog.Log.Error(ex, "Error encountered in download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
return false;
}
}
private async Task<bool> Step_DownloadAudiobookAsSingleFile()
{
var zeroProgress = Step_DownloadAudiobook_Start();
FileUtility.SaferDelete(OutputFileName);
var outputFile = File.Open(OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite);
OnFileCreated(OutputFileName);
AaxFile.ConversionProgressUpdate += AaxFile_ConversionProgressUpdate;
ConversionResult decryptionResult = await decryptAsync(outputFile);
AaxFile.ConversionProgressUpdate -= AaxFile_ConversionProgressUpdate;
Step_DownloadAudiobook_End(zeroProgress);
var success = decryptionResult == ConversionResult.NoErrorsDetected && !IsCanceled;
if (success)
base.OnFileCreated(OutputFileName);
return success;
}
private Task<ConversionResult> decryptAsync(Stream outputFile)
=> DownloadOptions.OutputFormat == OutputFormat.Mp3 ?
AaxFile.ConvertToMp3Async
(
outputFile,
DownloadOptions.LameConfig,
DownloadOptions.ChapterInfo,
DownloadOptions.TrimOutputToChapterLength
)
: DownloadOptions.FixupFile ?
AaxFile.ConvertToMp4aAsync
(
outputFile,
DownloadOptions.ChapterInfo,
DownloadOptions.TrimOutputToChapterLength
)
: AaxFile.ConvertToMp4aAsync(outputFile);
}
}
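
For orientation: the master branch wires these stages into a named, sequential async pipeline (the AsyncSteps assignments in the constructor above), while v8.5.1 hard-codes the Step 1-4 flow inside RunAsync. Below is a minimal, hypothetical sketch of such a step runner. It is not the actual Dinah.Core.StepRunner API; only the usage pattern shown above (named steps, stop on first failure, elapsed-time result) is taken from the code.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading.Tasks;

// Hypothetical stand-in for an async step sequence: ordered, named steps that
// run until one returns false, reporting overall success and elapsed time.
public class SimpleAsyncSteps
{
    private readonly List<(string Name, Func<Task<bool>> Step)> steps = new();
    public string Name { get; set; } = "";

    // Registering a step: sequence["Step 1: ..."] = () => Task.Run(DoWork);
    public Func<Task<bool>> this[string stepName]
    {
        set => steps.Add((stepName, value));
    }

    public async Task<(bool success, TimeSpan elapsed)> RunAsync()
    {
        var sw = Stopwatch.StartNew();
        foreach (var (name, step) in steps)
        {
            Console.WriteLine($"Running: {name}");
            if (!await step())
                return (false, sw.Elapsed);   // stop at the first failed step
        }
        return (true, sw.Elapsed);
    }
}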

View File

@ -1,218 +1,168 @@
using Dinah.Core;
using Dinah.Core.Net.Http;
using Dinah.Core.StepRunner;
using FileManager;
using System;
using System.IO;
using System.Threading.Tasks;
// v8.5.1 also used: using System.Collections.Generic;
#nullable enable
namespace AaxDecrypter
{
public enum OutputFormat { M4b, Mp3 }
public abstract class AudiobookDownloadBase
{
public event EventHandler<string?>? RetrievedTitle; public event EventHandler<string> RetrievedTitle;
public event EventHandler<string?>? RetrievedAuthors; public event EventHandler<string> RetrievedAuthors;
public event EventHandler<string?>? RetrievedNarrators; public event EventHandler<string> RetrievedNarrators;
public event EventHandler<byte[]?>? RetrievedCoverArt; public event EventHandler<byte[]> RetrievedCoverArt;
public event EventHandler<DownloadProgress>? DecryptProgressUpdate; public event EventHandler<DownloadProgress> DecryptProgressUpdate;
public event EventHandler<TimeSpan>? DecryptTimeRemaining; public event EventHandler<TimeSpan> DecryptTimeRemaining;
public event EventHandler<TempFile>? TempFileCreated; public event EventHandler<string> FileCreated;
public bool IsCanceled { get; protected set; } public bool IsCanceled { get; set; }
protected AsyncStepSequence AsyncSteps { get; } = new(); public string TempFilePath { get; }
protected string OutputDirectory { get; }
public IDownloadOptions DownloadOptions { get; }
protected NetworkFileStream InputFileStream => NfsPersister.NetworkFileStream;
protected virtual long InputFilePosition => InputFileStream.Position;
private bool downloadFinished;
private NetworkFileStreamPersister? m_nfsPersister; protected string OutputFileName { get; private set; }
private NetworkFileStreamPersister NfsPersister => m_nfsPersister ??= OpenNetworkFileStream(); protected IDownloadOptions DownloadOptions { get; }
private readonly DownloadProgress zeroProgress; protected NetworkFileStream InputFileStream => (nfsPersister ??= OpenNetworkFileStream()).NetworkFileStream;
private readonly string jsonDownloadState;
private readonly string tempFilePath;
protected AudiobookDownloadBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions) // Don't give the property a 'set'. This should have to be an obvious choice; not accidental
protected void SetOutputFileName(string newOutputFileName) => OutputFileName = newOutputFileName;
private NetworkFileStreamPersister nfsPersister;
private string jsonDownloadState { get; }
protected AudiobookDownloadBase(string outFileName, string cacheDirectory, IDownloadOptions dlOptions)
{ {
OutputDirectory = ArgumentValidator.EnsureNotNullOrWhiteSpace(outDirectory, nameof(outDirectory)); OutputFileName = ArgumentValidator.EnsureNotNullOrWhiteSpace(outFileName, nameof(outFileName));
DownloadOptions = ArgumentValidator.EnsureNotNull(dlOptions, nameof(dlOptions));
DownloadOptions.DownloadSpeedChanged += (_, speed) => InputFileStream.SpeedLimit = speed;
if (!Directory.Exists(OutputDirectory)) var outDir = Path.GetDirectoryName(OutputFileName);
Directory.CreateDirectory(OutputDirectory); if (!Directory.Exists(outDir))
Directory.CreateDirectory(outDir);
if (!Directory.Exists(cacheDirectory)) if (!Directory.Exists(cacheDirectory))
Directory.CreateDirectory(cacheDirectory); Directory.CreateDirectory(cacheDirectory);
jsonDownloadState = Path.Combine(cacheDirectory, $"{DownloadOptions.AudibleProductId}.json"); jsonDownloadState = Path.Combine(cacheDirectory, Path.GetFileName(Path.ChangeExtension(OutputFileName, ".json")));
tempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc"); TempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc");
zeroProgress = new DownloadProgress DownloadOptions = ArgumentValidator.EnsureNotNull(dlOptions, nameof(dlOptions));
{
BytesReceived = 0,
ProgressPercentage = 0,
TotalBytesToReceive = 0
};
OnDecryptProgressUpdate(zeroProgress); // delete file after validation is complete
FileUtility.SaferDelete(OutputFileName);
} }
protected TempFile GetNewTempFilePath(string extension) public abstract Task CancelAsync();
public virtual void SetCoverArt(byte[] coverArt)
{ {
extension = FileUtility.GetStandardizedExtension(extension); if (coverArt is not null)
var path = Path.Combine(OutputDirectory, Guid.NewGuid().ToString("N") + extension); OnRetrievedCoverArt(coverArt);
return new(path, extension);
} }
public async Task<bool> RunAsync() public abstract Task<bool> RunAsync();
{
await InputFileStream.BeginDownloadingAsync();
var progressTask = Task.Run(reportProgress);
(bool success, var elapsed) = await AsyncSteps.RunAsync(); protected void OnRetrievedTitle(string title)
//Stop the downloader so it doesn't keep running in the background.
if (!success)
NfsPersister.Dispose();
await progressTask;
var speedup = DownloadOptions.RuntimeLength / elapsed;
Serilog.Log.Information($"Speedup is {speedup:F0}x realtime.");
NfsPersister.Dispose();
return success;
async Task reportProgress()
{
AverageSpeed averageSpeed = new();
while (
InputFileStream.CanRead
&& InputFileStream.Length > InputFilePosition
&& !InputFileStream.IsCancelled
&& !downloadFinished)
{
averageSpeed.AddPosition(InputFilePosition);
var estSecsRemaining = (InputFileStream.Length - InputFilePosition) / averageSpeed.Average;
if (double.IsNormal(estSecsRemaining))
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estSecsRemaining));
var progressPercent = 100d * InputFilePosition / InputFileStream.Length;
OnDecryptProgressUpdate(
new DownloadProgress
{
ProgressPercentage = progressPercent,
BytesReceived = InputFilePosition,
TotalBytesToReceive = InputFileStream.Length
});
await Task.Delay(200);
}
OnDecryptTimeRemaining(TimeSpan.Zero);
OnDecryptProgressUpdate(zeroProgress);
}
}
public virtual Task CancelAsync()
{
IsCanceled = true;
FinalizeDownload();
return Task.CompletedTask;
}
protected abstract Task<bool> Step_DownloadAndDecryptAudiobookAsync();
public virtual void SetCoverArt(byte[] coverArt) { }
protected void OnRetrievedTitle(string? title)
=> RetrievedTitle?.Invoke(this, title); => RetrievedTitle?.Invoke(this, title);
protected void OnRetrievedAuthors(string? authors) protected void OnRetrievedAuthors(string authors)
=> RetrievedAuthors?.Invoke(this, authors); => RetrievedAuthors?.Invoke(this, authors);
protected void OnRetrievedNarrators(string? narrators) protected void OnRetrievedNarrators(string narrators)
=> RetrievedNarrators?.Invoke(this, narrators); => RetrievedNarrators?.Invoke(this, narrators);
protected void OnRetrievedCoverArt(byte[]? coverArt) protected void OnRetrievedCoverArt(byte[] coverArt)
=> RetrievedCoverArt?.Invoke(this, coverArt); => RetrievedCoverArt?.Invoke(this, coverArt);
protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress) protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress)
=> DecryptProgressUpdate?.Invoke(this, downloadProgress); => DecryptProgressUpdate?.Invoke(this, downloadProgress);
protected void OnDecryptTimeRemaining(TimeSpan timeRemaining) protected void OnDecryptTimeRemaining(TimeSpan timeRemaining)
=> DecryptTimeRemaining?.Invoke(this, timeRemaining); => DecryptTimeRemaining?.Invoke(this, timeRemaining);
public void OnTempFileCreated(TempFile path) protected void OnFileCreated(string path)
=> TempFileCreated?.Invoke(this, path); => FileCreated?.Invoke(this, path);
protected virtual void FinalizeDownload() protected void CloseInputFileStream()
{ {
NfsPersister.Dispose(); nfsPersister?.NetworkFileStream?.Close();
downloadFinished = true; nfsPersister?.Dispose();
} }
protected async Task<bool> Step_CreateCueAsync() protected bool Step_CreateCue()
{ {
if (!DownloadOptions.CreateCueSheet) return !IsCanceled; if (!DownloadOptions.CreateCueSheet) return true;
if (DownloadOptions.ChapterInfo.Count <= 1)
{
Serilog.Log.Logger.Information($"Skipped creating .cue because book has no chapters.");
return !IsCanceled;
}
// not a critical step. its failure should not prevent future steps from running // not a critical step. its failure should not prevent future steps from running
try try
{ {
var tempFile = GetNewTempFilePath(".cue"); var path = Path.ChangeExtension(OutputFileName, ".cue");
await File.WriteAllTextAsync(tempFile.FilePath, Cue.CreateContents(Path.GetFileName(tempFile.FilePath), DownloadOptions.ChapterInfo)); path = FileUtility.GetValidFilename(path, DownloadOptions.ReplacementCharacters);
OnTempFileCreated(tempFile); File.WriteAllText(path, Cue.CreateContents(Path.GetFileName(OutputFileName), DownloadOptions.ChapterInfo));
OnFileCreated(path);
} }
catch (Exception ex) catch (Exception ex)
{ {
Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCueAsync)} Failed"); Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCue)}. FAILED");
} }
return !IsCanceled; return !IsCanceled;
} }
protected bool Step_Cleanup()
{
bool success = !IsCanceled;
if (success)
{
FileUtility.SaferDelete(jsonDownloadState);
if (DownloadOptions.AudibleKey is not null &&
DownloadOptions.AudibleIV is not null &&
DownloadOptions.RetainEncryptedFile)
{
string aaxPath = Path.ChangeExtension(TempFilePath, ".aax");
FileUtility.SaferMove(TempFilePath, aaxPath);
//Write aax decryption key
string keyPath = Path.ChangeExtension(aaxPath, ".key");
FileUtility.SaferDelete(keyPath);
File.WriteAllText(keyPath, $"Key={DownloadOptions.AudibleKey}\r\nIV={DownloadOptions.AudibleIV}");
OnFileCreated(aaxPath);
OnFileCreated(keyPath);
}
else
FileUtility.SaferDelete(TempFilePath);
}
return success;
}
private NetworkFileStreamPersister OpenNetworkFileStream() private NetworkFileStreamPersister OpenNetworkFileStream()
{ {
NetworkFileStreamPersister? nfsp = default; if (!File.Exists(jsonDownloadState))
return NewNetworkFilePersister();
try try
{ {
if (!File.Exists(jsonDownloadState)) var nfsp = new NetworkFileStreamPersister(jsonDownloadState);
return nfsp = newNetworkFilePersister(); // If More than ~1 hour has elapsed since getting the download url, it will expire.
// The new url will be to the same file.
nfsp = new NetworkFileStreamPersister(jsonDownloadState);
// The download url expires after 1 hour.
// The new url points to the same file.
nfsp.NetworkFileStream.SetUriForSameFile(new Uri(DownloadOptions.DownloadUrl)); nfsp.NetworkFileStream.SetUriForSameFile(new Uri(DownloadOptions.DownloadUrl));
return nfsp; return nfsp;
} }
catch catch
{ {
nfsp?.Target?.Dispose();
FileUtility.SaferDelete(jsonDownloadState); FileUtility.SaferDelete(jsonDownloadState);
FileUtility.SaferDelete(tempFilePath); FileUtility.SaferDelete(TempFilePath);
return nfsp = newNetworkFilePersister(); return NewNetworkFilePersister();
}
finally
{
//nfsp will only be null when an unhandled exception occurs. Let the caller handle it.
if (nfsp is not null)
{
nfsp.NetworkFileStream.RequestHeaders["User-Agent"] = DownloadOptions.UserAgent;
nfsp.NetworkFileStream.SpeedLimit = DownloadOptions.DownloadSpeedBps;
OnTempFileCreated(new(tempFilePath, DownloadOptions.InputType.ToString()));
OnTempFileCreated(new(jsonDownloadState));
} }
} }
NetworkFileStreamPersister newNetworkFilePersister() private NetworkFileStreamPersister NewNetworkFilePersister()
{ {
var networkFileStream = new NetworkFileStream(tempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, new() { { "User-Agent", DownloadOptions.UserAgent } }); var headers = new System.Net.WebHeaderCollection
{
{ "User-Agent", DownloadOptions.UserAgent }
};
var networkFileStream = new NetworkFileStream(TempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, headers);
return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState); return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState);
} }
} }
} }
}

View File

@ -1,171 +0,0 @@
using Dinah.Core;
using System;
using System.Collections.Generic;
using System.Linq;
namespace AaxDecrypter;
public static class LinqStats
{
public static (double mean, double stdDev) BasicStatisticsBy<T>(this IEnumerable<T> values, Func<T, double> selector)
{
var count = values.Count();
var mean = values.Average(selector);
return (mean, Math.Sqrt(values.Sum(s => Math.Pow(selector(s) - mean, 2)) / (count - 1)));
}
public static bool T_Test_2By<T>(this IEnumerable<T> values, Func<T, double> selector, IEnumerable<T> secondGroup, Significance confidence)
{
var n1 = values.Count();
var n2 = secondGroup.Count();
var n = n1 + n2;
if (n1 < 3 || n2 < 3) return false;
(var mean1, var stdDev1) = values.BasicStatisticsBy(selector);
(var mean2, var stdDev2) = secondGroup.BasicStatisticsBy(selector);
var pooledStdDev = Math.Sqrt((((n1 - 1) * (stdDev1 * stdDev1)) + ((n2 - 1) * (stdDev2 * stdDev2))) / (n1 + n2 - 2));
var testStat = Math.Abs(mean1 - mean2) / (pooledStdDev * Math.Sqrt(1d / n1 + 1d / n2));
var crit = T_Stat(Math.Min(n - 2, MAX_DEGREES_FREEDOM), confidence);
return testStat > crit;
}
public static bool T_Test_1By<T>(this IEnumerable<T> values, Func<T, double> selector, double testMean, Significance confidence)
{
var n = values.Count();
if (n < 2) return false;
(var sampleMean, var sampleStdDev) = values.BasicStatisticsBy(selector);
var testStat = Math.Abs(sampleMean - testMean) / (sampleStdDev / Math.Sqrt(n));
var crit = T_Stat(Math.Min(n - 1, MAX_DEGREES_FREEDOM), confidence);
return testStat > crit;
}
private static double T_Stat(int degreesFreedom, Significance confidence)
{
ArgumentValidator.EnsureBetweenInclusive(degreesFreedom, nameof(degreesFreedom), MIN_DEGREES_FREEDOM, MAX_DEGREES_FREEDOM);
return T_TABLE[(int)confidence][degreesFreedom - MIN_DEGREES_FREEDOM];
}
static LinqStats()
{
T_TABLE = new double[][] { T_Table_01, T_Table_05, T_Table_10, T_Table_15, T_Table_20, T_Table_25 };
}
private const int MIN_DEGREES_FREEDOM = 1;
private const int MAX_DEGREES_FREEDOM = 201;
/// <summary>
/// 2-tailed t-Distribution critical values at 75%, 80%, 85%,
/// 90%, 95%, and 99% confidence for 1 - 201 degrees of freedom.
/// </summary>
private readonly static double[][] T_TABLE;
private readonly static double[] T_Table_25 = { 2.414213562, 1.603567451, 1.422625281, 1.344397556, 1.300949037, 1.273349309, 1.254278682, 1.240318261, 1.229659173, 1.221255395, 1.214460246, 1.208852542, 1.204146242, 1.200140298, 1.196689284, 1.193685414, 1.191047107, 1.188711483, 1.186629298, 1.184761434, 1.183076432, 1.181548697, 1.180157199, 1.178884497, 1.177716003, 1.176639425, 1.175644329, 1.174721803, 1.173864189, 1.173064871, 1.1723181, 1.17161886, 1.170962753, 1.17034591, 1.169764906, 1.169216709, 1.168698615, 1.168208212, 1.167743338, 1.167302049, 1.166882595, 1.166483396, 1.166103019, 1.165740162, 1.165393644, 1.165062385, 1.164745398, 1.164441782, 1.164150707, 1.163871412, 1.163603196, 1.163345413, 1.163097467, 1.162858803, 1.162628911, 1.162407316, 1.162193577, 1.161987283, 1.161788052, 1.161595527, 1.161409375, 1.161229286, 1.161054967, 1.160886145, 1.160722566, 1.160563987, 1.160410184, 1.160260944, 1.160116066, 1.159975363, 1.159838656, 1.159705777, 1.159576569, 1.15945088, 1.15932857, 1.159209503, 1.159093552, 1.158980598, 1.158870524, 1.158763222, 1.158658589, 1.158556526, 1.15845694, 1.158359742, 1.158264847, 1.158172173, 1.158081645, 1.157993188, 1.157906731, 1.157822209, 1.157739556, 1.157658712, 1.157579617, 1.157502216, 1.157426454, 1.157352281, 1.157279646, 1.157208502, 1.157138804, 1.157070509, 1.157003573, 1.156937958, 1.156873624, 1.156810534, 1.156748653, 1.156687945, 1.156628379, 1.156569922, 1.156512543, 1.156456213, 1.156400904, 1.156346587, 1.156293237, 1.156240827, 1.156189334, 1.156138733, 1.156089001, 1.156040117, 1.155992058, 1.155944804, 1.155898335, 1.155852631, 1.155807674, 1.155763446, 1.155719928, 1.155677105, 1.155634959, 1.155593475, 1.155552637, 1.15551243, 1.155472839, 1.155433851, 1.155395452, 1.155357629, 1.155320368, 1.155283658, 1.155247486, 1.155211841, 1.15517671, 1.155142084, 1.15510795, 1.1550743, 1.155041122, 1.155008406, 1.154976144, 1.154944326, 1.154912942, 1.154881984, 1.154851443, 1.154821311, 1.15479158, 1.154762241, 1.154733287, 1.154704711, 1.154676505, 1.154648662, 1.154621175, 1.154594037, 1.154567242, 1.154540783, 1.154514654, 1.154488849, 1.154463361, 1.154438185, 1.154413316, 1.154388747, 1.154364474, 1.15434049, 1.154316792, 1.154293373, 1.154270229, 1.154247355, 1.154224746, 1.154202398, 1.154180307, 1.154158467, 1.154136875, 1.154115526, 1.154094417, 1.154073543, 1.1540529, 1.154032485, 1.154012294, 1.153992323, 1.153972568, 1.153953027, 1.153933695, 1.15391457, 1.153895647, 1.153876925, 1.153858399, 1.153840066, 1.153821925, 1.15380397, 1.153786201, 1.153768613, 1.153751204, 1.153733972, 1.153716914, 1.153700026 };
private readonly static double[] T_Table_20 = { 3.077683537, 1.885618083, 1.637744354, 1.533206274, 1.475884049, 1.439755747, 1.414923928, 1.39681531, 1.383028738, 1.372183641, 1.363430318, 1.356217334, 1.350171289, 1.345030374, 1.340605608, 1.336757167, 1.33337939, 1.330390944, 1.327728209, 1.325340707, 1.323187874, 1.321236742, 1.31946024, 1.317835934, 1.316345073, 1.314971864, 1.313702913, 1.312526782, 1.311433647, 1.310415025, 1.309463549, 1.308572793, 1.307737124, 1.306951587, 1.306211802, 1.305513886, 1.304854381, 1.304230204, 1.303638589, 1.303077053, 1.302543359, 1.302035487, 1.301551608, 1.30109006, 1.300649332, 1.300228048, 1.299824947, 1.299438879, 1.299068785, 1.298713694, 1.298372713, 1.298045016, 1.297729843, 1.297426488, 1.2971343, 1.296852673, 1.296581044, 1.29631889, 1.296065725, 1.295821094, 1.295584571, 1.295355762, 1.295134294, 1.29491982, 1.294712013, 1.294510568, 1.294315197, 1.294125629, 1.293941609, 1.293762898, 1.293589269, 1.293420507, 1.293256413, 1.293096793, 1.292941469, 1.292790268, 1.292643029, 1.292499597, 1.292359828, 1.292223583, 1.29209073, 1.291961144, 1.291834705, 1.291711301, 1.291590824, 1.291473171, 1.291358243, 1.291245948, 1.291136195, 1.291028899, 1.290923979, 1.290821356, 1.290720956, 1.290622708, 1.290526543, 1.290432395, 1.290340202, 1.290249904, 1.290161442, 1.290074761, 1.289989809, 1.289906533, 1.289824884, 1.289744816, 1.289666283, 1.289589241, 1.289513648, 1.289439464, 1.289366649, 1.289295166, 1.289224979, 1.289156054, 1.289088355, 1.289021851, 1.28895651, 1.288892302, 1.288829199, 1.288767171, 1.288706191, 1.288646234, 1.288587273, 1.288529284, 1.288472243, 1.288416127, 1.288360913, 1.288306581, 1.288253109, 1.288200477, 1.288148665, 1.288097654, 1.288047427, 1.287997964, 1.287949248, 1.287901264, 1.287853994, 1.287807422, 1.287761534, 1.287716314, 1.287671748, 1.287627821, 1.287584521, 1.287541833, 1.287499745, 1.287458245, 1.287417319, 1.287376957, 1.287337146, 1.287297876, 1.287259135, 1.287220914, 1.2871832, 1.287145985, 1.287109259, 1.287073012, 1.287037235, 1.287001918, 1.286967053, 1.286932631, 1.286898644, 1.286865084, 1.286831942, 1.286799212, 1.286766884, 1.286734952, 1.286703409, 1.286672248, 1.286641461, 1.286611042, 1.286580985, 1.286551283, 1.286521929, 1.286492918, 1.286464244, 1.286435901, 1.286407882, 1.286380184, 1.286352799, 1.286325724, 1.286298952, 1.286272479, 1.286246299, 1.286220408, 1.286194801, 1.286169474, 1.286144421, 1.286119638, 1.286095122, 1.286070867, 1.28604687, 1.286023127, 1.285999633, 1.285976384, 1.285953377, 1.285930609, 1.285908074, 1.285885771, 1.285863694, 1.285841842, 1.285820209, 1.285798794 };
private readonly static double[] T_Table_15 = { 4.16529977, 2.281930588, 1.924319657, 1.778192164, 1.699362566, 1.650173154, 1.616591737, 1.59222144, 1.573735785, 1.559235933, 1.547559766, 1.537956495, 1.529919606, 1.523095061, 1.517227969, 1.51213017, 1.507659754, 1.503707672, 1.500188756, 1.497035518, 1.494193795, 1.491619612, 1.489276897, 1.487135783, 1.485171326, 1.483362535, 1.481691617, 1.48014339, 1.478704821, 1.477364662, 1.47611315, 1.474941772, 1.473843072, 1.47281049, 1.471838233, 1.470921166, 1.470054719, 1.469234815, 1.468457801, 1.467720399, 1.467019655, 1.466352901, 1.465717725, 1.465111933, 1.464533534, 1.463980712, 1.463451805, 1.462945295, 1.46245979, 1.461994009, 1.461546775, 1.461117, 1.460703683, 1.460305896, 1.45992278, 1.459553538, 1.45919743, 1.458853767, 1.458521908, 1.458201256, 1.457891251, 1.457591373, 1.457301133, 1.457020074, 1.456747768, 1.45648381, 1.456227824, 1.455979454, 1.455738365, 1.455504241, 1.455276784, 1.455055715, 1.454840767, 1.45463169, 1.454428246, 1.454230212, 1.454037373, 1.453849529, 1.453666487, 1.453488066, 1.453314093, 1.453144404, 1.452978842, 1.452817259, 1.452659513, 1.452505469, 1.452354998, 1.452207977, 1.452064289, 1.451923821, 1.451786468, 1.451652126, 1.451520697, 1.451392088, 1.451266209, 1.451142973, 1.451022299, 1.450904108, 1.450788323, 1.450674871, 1.450563684, 1.450454694, 1.450347836, 1.450243048, 1.450140271, 1.450039448, 1.449940523, 1.449843444, 1.449748158, 1.449654617, 1.449562773, 1.449472581, 1.449383997, 1.449296977, 1.449211481, 1.449127468, 1.449044902, 1.448963744, 1.448883959, 1.448805513, 1.448728372, 1.448652503, 1.448577876, 1.44850446, 1.448432226, 1.448361146, 1.448291192, 1.448222337, 1.448154557, 1.448087826, 1.44802212, 1.447957415, 1.447893688, 1.447830919, 1.447769085, 1.447708165, 1.44764814, 1.44758899, 1.447530695, 1.447473238, 1.447416601, 1.447360765, 1.447305715, 1.447251433, 1.447197905, 1.447145113, 1.447093044, 1.447041682, 1.446991013, 1.446941023, 1.446891698, 1.446843026, 1.446794994, 1.446747588, 1.446700797, 1.446654609, 1.446609012, 1.446563996, 1.446519548, 1.446475659, 1.446432318, 1.446389514, 1.446347238, 1.44630548, 1.44626423, 1.44622348, 1.44618322, 1.446143442, 1.446104137, 1.446065296, 1.446026911, 1.445988975, 1.44595148, 1.445914417, 1.44587778, 1.445841561, 1.445805753, 1.445770349, 1.445735343, 1.445700727, 1.445666495, 1.445632641, 1.445599159, 1.445566042, 1.445533284, 1.445500881, 1.445468825, 1.445437112, 1.445405736, 1.445374691, 1.445343973, 1.445313576, 1.445283495, 1.445253726, 1.445224264, 1.445195103, 1.445166239, 1.445137668, 1.445109385, 1.445081387 };
private readonly static double[] T_Table_10 = { 6.313751515, 2.91998558, 2.353363435, 2.131846786, 2.015048373, 1.943180281, 1.894578605, 1.859548038, 1.833112933, 1.812461123, 1.795884819, 1.782287556, 1.770933396, 1.761310136, 1.753050356, 1.745883676, 1.739606726, 1.734063607, 1.729132812, 1.724718243, 1.720742903, 1.717144374, 1.713871528, 1.71088208, 1.708140761, 1.70561792, 1.703288446, 1.701130934, 1.699127027, 1.697260887, 1.695518783, 1.693888748, 1.692360309, 1.690924255, 1.689572458, 1.688297714, 1.68709362, 1.68595446, 1.684875122, 1.683851013, 1.682878002, 1.681952357, 1.681070703, 1.680229977, 1.679427393, 1.678660414, 1.677926722, 1.677224196, 1.676550893, 1.675905025, 1.67528495, 1.674689154, 1.674116237, 1.673564906, 1.673033965, 1.672522303, 1.672028888, 1.671552762, 1.671093032, 1.670648865, 1.670219484, 1.669804163, 1.669402222, 1.669013025, 1.668635976, 1.668270514, 1.667916114, 1.667572281, 1.667238549, 1.666914479, 1.666599658, 1.666293696, 1.665996224, 1.665706893, 1.665425373, 1.665151353, 1.664884537, 1.664624645, 1.664371409, 1.664124579, 1.663883913, 1.663649184, 1.663420175, 1.663196679, 1.6629785, 1.662765449, 1.662557349, 1.662354029, 1.662155326, 1.661961084, 1.661771155, 1.661585397, 1.661403674, 1.661225855, 1.661051817, 1.66088144, 1.66071461, 1.660551217, 1.660391156, 1.660234326, 1.66008063, 1.659929976, 1.659782273, 1.659637437, 1.659495383, 1.659356034, 1.659219312, 1.659085144, 1.658953458, 1.658824187, 1.658697265, 1.658572629, 1.658450216, 1.658329969, 1.65821183, 1.658095744, 1.657981659, 1.657869522, 1.657759285, 1.657650899, 1.657544319, 1.657439499, 1.657336397, 1.65723497, 1.657135178, 1.657036982, 1.656940344, 1.656845226, 1.656751594, 1.656659413, 1.656568649, 1.65647927, 1.656391244, 1.656304542, 1.656219133, 1.656134988, 1.65605208, 1.655970382, 1.655889868, 1.655810511, 1.655732287, 1.655655173, 1.655579143, 1.655504177, 1.655430251, 1.655357345, 1.655285437, 1.655214506, 1.655144534, 1.6550755, 1.655007387, 1.654940175, 1.654873847, 1.654808385, 1.654743774, 1.654679996, 1.654617035, 1.654554875, 1.654493503, 1.654432901, 1.654373057, 1.654313957, 1.654255585, 1.654197929, 1.654140976, 1.654084713, 1.654029128, 1.653974208, 1.653919942, 1.653866317, 1.653813324, 1.653760949, 1.653709184, 1.653658017, 1.653607437, 1.653557435, 1.653508002, 1.653459126, 1.6534108, 1.653363013, 1.653315758, 1.653269024, 1.653222803, 1.653177088, 1.653131869, 1.653087138, 1.653042889, 1.652999113, 1.652955802, 1.652912949, 1.652870547, 1.652828589, 1.652787068, 1.652745977, 1.65270531, 1.652665059, 1.652625219, 1.652585784, 1.652546746, 1.652508101 };
private readonly static double[] T_Table_05 = { 12.70620474, 4.30265273, 3.182446305, 2.776445105, 2.570581836, 2.446911851, 2.364624252, 2.306004135, 2.262157163, 2.228138852, 2.20098516, 2.17881283, 2.160368656, 2.144786688, 2.131449546, 2.119905299, 2.109815578, 2.10092204, 2.093024054, 2.085963447, 2.079613845, 2.073873068, 2.06865761, 2.063898562, 2.059538553, 2.055529439, 2.051830516, 2.048407142, 2.045229642, 2.042272456, 2.039513446, 2.036933343, 2.034515297, 2.032244509, 2.030107928, 2.028094001, 2.026192463, 2.024394164, 2.02269092, 2.02107539, 2.01954097, 2.018081703, 2.016692199, 2.015367574, 2.014103389, 2.012895599, 2.011740514, 2.010634758, 2.009575237, 2.008559112, 2.00758377, 2.006646805, 2.005745995, 2.004879288, 2.004044783, 2.003240719, 2.002465459, 2.001717484, 2.000995378, 2.000297822, 1.999623585, 1.998971517, 1.998340543, 1.997729654, 1.997137908, 1.996564419, 1.996008354, 1.995468931, 1.994945415, 1.994437112, 1.993943368, 1.993463567, 1.992997126, 1.992543495, 1.992102154, 1.99167261, 1.991254395, 1.990847069, 1.99045021, 1.990063421, 1.989686323, 1.989318557, 1.98895978, 1.988609667, 1.988267907, 1.987934206, 1.987608282, 1.987289865, 1.9869787, 1.986674541, 1.986377154, 1.986086317, 1.985801814, 1.985523442, 1.985251004, 1.984984312, 1.984723186, 1.984467455, 1.984216952, 1.983971519, 1.983731003, 1.983495259, 1.983264145, 1.983037526, 1.982815274, 1.982597262, 1.98238337, 1.982173483, 1.98196749, 1.981765282, 1.981566757, 1.981371815, 1.981180359, 1.980992298, 1.980807541, 1.980626002, 1.980447599, 1.980272249, 1.980099876, 1.979930405, 1.979763763, 1.979599878, 1.979438685, 1.979280117, 1.979124109, 1.978970602, 1.978819535, 1.97867085, 1.978524491, 1.978380405, 1.978238539, 1.978098842, 1.977961264, 1.977825758, 1.977692277, 1.977560777, 1.977431212, 1.977303542, 1.977177724, 1.97705372, 1.976931489, 1.976810994, 1.976692198, 1.976575066, 1.976459563, 1.976345655, 1.976233309, 1.976122494, 1.976013178, 1.975905331, 1.975798924, 1.975693928, 1.975590315, 1.975488058, 1.975387131, 1.975287508, 1.975189163, 1.975092073, 1.974996213, 1.97490156, 1.974808092, 1.974715786, 1.974624621, 1.974534576, 1.97444563, 1.974357764, 1.974270957, 1.974185191, 1.974100447, 1.974016708, 1.973933954, 1.973852169, 1.973771337, 1.97369144, 1.973612462, 1.973534388, 1.973457202, 1.973380889, 1.973305434, 1.973230823, 1.973157042, 1.973084077, 1.973011915, 1.972940542, 1.972869946, 1.972800114, 1.972731033, 1.972662692, 1.972595079, 1.972528182, 1.97246199, 1.972396491, 1.972331676, 1.972267533, 1.972204051, 1.972141222, 1.972079034, 1.972017478, 1.971956544, 1.971896224 };
private readonly static double[] T_Table_01 = { 63.65674116, 9.924843201, 5.84090931, 4.604094871, 4.032142984, 3.707428021, 3.499483297, 3.355387331, 3.249835542, 3.169272673, 3.105806516, 3.054539589, 3.012275839, 2.976842734, 2.946712883, 2.920781622, 2.89823052, 2.878440473, 2.860934606, 2.84533971, 2.831359558, 2.818756061, 2.807335684, 2.796939505, 2.787435814, 2.778714533, 2.770682957, 2.763262455, 2.756385904, 2.749995654, 2.744041919, 2.738481482, 2.733276642, 2.728394367, 2.723805589, 2.71948463, 2.715408722, 2.711557602, 2.707913184, 2.704459267, 2.701181304, 2.698066186, 2.695102079, 2.692278266, 2.689585019, 2.687013492, 2.684555618, 2.682204027, 2.679951974, 2.677793271, 2.675722234, 2.673733631, 2.671822636, 2.669984796, 2.668215988, 2.666512398, 2.664870482, 2.663286954, 2.661758752, 2.660283029, 2.658857127, 2.657478565, 2.656145025, 2.654854337, 2.653604469, 2.652393515, 2.651219685, 2.650081299, 2.648976774, 2.647904624, 2.646863444, 2.645851913, 2.644868782, 2.643912872, 2.642983067, 2.642078313, 2.641197611, 2.640340015, 2.639504627, 2.638690596, 2.637897113, 2.63712341, 2.636368757, 2.635632458, 2.634913852, 2.634212309, 2.633527229, 2.632858038, 2.632204191, 2.631565166, 2.630940463, 2.630329608, 2.629732145, 2.629147638, 2.628575671, 2.628015844, 2.627467774, 2.626931096, 2.626405457, 2.625890521, 2.625385965, 2.624891476, 2.624406758, 2.623931523, 2.623465496, 2.623008411, 2.622560015, 2.622120061, 2.621688313, 2.621264543, 2.620848534, 2.620440073, 2.620038957, 2.619644989, 2.619257981, 2.618877749, 2.618504116, 2.618136914, 2.617775976, 2.617421145, 2.617072266, 2.616729191, 2.616391776, 2.616059883, 2.615733377, 2.615412127, 2.615096008, 2.614784899, 2.61447868, 2.614177238, 2.613880461, 2.613588242, 2.613300477, 2.613017065, 2.612737908, 2.61246291, 2.61219198, 2.611925028, 2.611661966, 2.611402711, 2.611147181, 2.610895295, 2.610646976, 2.61040215, 2.610160742, 2.609922682, 2.609687901, 2.609456331, 2.609227907, 2.609002566, 2.608780245, 2.608560883, 2.608344423, 2.608130807, 2.60791998, 2.607711886, 2.607506474, 2.607303692, 2.607103489, 2.606905817, 2.606710628, 2.606517876, 2.606327515, 2.606139501, 2.605953791, 2.605770342, 2.605589114, 2.605410067, 2.605233162, 2.605058359, 2.604885623, 2.604714916, 2.604546204, 2.60437945, 2.604214622, 2.604051686, 2.60389061, 2.603731363, 2.603573912, 2.603418229, 2.603264282, 2.603112045, 2.602961487, 2.602812582, 2.602665303, 2.602519622, 2.602375515, 2.602232955, 2.602091918, 2.60195238, 2.601814317, 2.601677705, 2.601542523, 2.601408747, 2.601276355, 2.601145327, 2.601015642, 2.600887278, 2.600760216, 2.600634436 };
}
public enum Significance
{
P01,
P05,
P10,
P15,
P20,
P25
}
public class AverageSpeed
{
/// <summary>Average speed in units per second</summary>
public double Average { get; private set; }
public TimeSpan SlowWindow { get; }
public TimeSpan FastWindow { get; }
public Significance SlowSignificance { get; }
public Significance FastSignificance { get; }
private DateTime start;
private TimeSpan lastTime;
private double lastPosition = double.NaN;
private readonly record struct Point(TimeSpan Time, double Velocity);
private readonly LinkedList<Point> speeds = new();
private const int MAX_SPEEDS = 200;
public AverageSpeed() : this(TimeSpan.FromSeconds(15), Significance.P10, TimeSpan.FromSeconds(3), Significance.P01) { }
/// <param name="slowWindow">Total moving average time window</param>
/// <param name="slowSignificance">T-test significance level at which the newest speed will be considered different from the slow window's mean speed.</param>
/// <param name="fastWindow">A shorter moving window of the most resent speeds. The average speed in <paramref name="fastWindow"/> is compared to the average speed in the rest of <paramref name="slowWindow"/> to quickly detect large changes in speed.</param>
/// <param name="fastSignificance">T-test significance level at which the mean speed in <paramref name="fastWindow"/> will be considered different from the mean speed of the remainder of <paramref name="slowWindow"/>.</param>
public AverageSpeed(TimeSpan slowWindow, Significance slowSignificance, TimeSpan fastWindow, Significance fastSignificance)
{
SlowWindow = ArgumentValidator.EnsureGreaterThan(slowWindow, nameof(slowWindow), fastWindow);
FastWindow = ArgumentValidator.EnsureGreaterThan(fastWindow, nameof(fastWindow), TimeSpan.Zero);
SlowSignificance = slowSignificance;
FastSignificance = fastSignificance;
}
/// <summary>Add a new position to the moving average</summary>
public void AddPosition(double position)
{
var now = DateTime.UtcNow;
if (start == default)
start = now;
var time = now - start;
while (speeds.Count > MAX_SPEEDS || (speeds.Count > 2 && time - speeds.First.Value.Time > SlowWindow))
speeds.RemoveFirst();
if (!double.IsNaN(lastPosition))
{
var newSpeed = (position - lastPosition) / (time - lastTime).TotalSeconds;
speeds.AddLast(new Point(time, newSpeed));
}
lastTime = time;
lastPosition = position;
Average = ComputeNextAverage();
}
private double ComputeNextAverage()
{
if (speeds.Count == 0)
return 0;
else if (speeds.Count == 1)
return speeds.Last.Value.Velocity;
else
{
var n_newest = speeds.Count(s => s.Time > lastTime.Subtract(FastWindow));
var n_oldest = speeds.Count - n_newest;
if (speeds.Take(n_oldest).T_Test_2By(s => s.Velocity, speeds.TakeLast(n_newest), FastSignificance))
{
//Speeds in FastWindow are significantly different from the rest of the speeds in SlowWindow.
//Discard older speeds and keep only speeds in FastWindow
for (; n_oldest > 0; n_oldest--)
speeds.RemoveFirst();
return speeds.Average(s => s.Velocity);
}
else
return
speeds.T_Test_1By(s => s.Velocity, Average, SlowSignificance)
? speeds.Average(s => s.Velocity)
: Average;
}
}
}
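
The deleted AverageSpeed class above keeps a moving average of throughput over a 15-second window and uses the LinqStats t-tests to decide when the most recent 3 seconds represent a genuine change in speed. A small usage sketch follows, with made-up byte counts; only the members shown above are assumed.

// Periodically feed the current byte position, then derive an ETA from Average.
var speed = new AverageSpeed();
long totalBytes = 100_000_000;
long position = 0;
while (position < totalBytes)
{
    position = Math.Min(totalBytes, position + 2_000_000); // pretend ~2 MB arrived
    speed.AddPosition(position);                           // Average becomes bytes per second
    if (speed.Average > 0)
    {
        var secondsRemaining = (totalBytes - position) / speed.Average;
        Console.WriteLine($"ETA: {TimeSpan.FromSeconds(secondsRemaining):mm\\:ss}");
    }
    System.Threading.Thread.Sleep(200);
}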

View File

@ -1,7 +1,8 @@
using AAXClean;
using Dinah.Core;
using System.IO;
using System.Text;
// v8.5.1 also used: using System;
namespace AaxDecrypter
{
@ -15,14 +16,15 @@ namespace AaxDecrypter
var startOffset = chapters.StartOffset;
var trackCount = 1; // v8.5.1: var trackCount = 0; incremented at the top of the loop
foreach (var c in chapters.Chapters)
{
var startTime = c.StartOffset - startOffset;
stringBuilder.AppendLine($"TRACK {trackCount++} AUDIO"); // v8.5.1: $"TRACK {trackCount} AUDIO"
stringBuilder.AppendLine($" TITLE \"{c.Title}\"");
stringBuilder.AppendLine($" INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds * 75d / 1000):D2}"); // v8.5.1 frame field: {(int)(startTime.Milliseconds / 1000d * 75)}
}
return stringBuilder.ToString();
@ -44,7 +46,7 @@ namespace AaxDecrypter
for (var i = 0; i < cueContents.Length; i++)
{
var line = cueContents[i];
if (!line.Trim().StartsWith("FILE") || !line.Contains(' ')) // v8.5.1: !line.Contains(" ")
continue;
var fileTypeBegins = line.LastIndexOf(" ") + 1;
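// Worked example of the INDEX arithmetic above (master formula): a chapter offset of
// 754.2 seconds is written as "INDEX 01 12:34:15", since 754.2 s = 12 min 34 s plus
// 200 ms, and 200 ms * 75 / 1000 = 15 frames. Cue INDEX fields are minutes:seconds:frames,
// with 75 frames per second.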

View File

@ -1,54 +1,25 @@
using AAXClean;
using System;
#nullable enable
namespace AaxDecrypter
{
public class KeyData
{
public byte[] KeyPart1 { get; }
public byte[]? KeyPart2 { get; }
public KeyData(byte[] keyPart1, byte[]? keyPart2 = null)
{
KeyPart1 = keyPart1;
KeyPart2 = keyPart2;
}
public KeyData(string keyPart1, string? keyPart2 = null)
{
ArgumentNullException.ThrowIfNull(keyPart1, nameof(keyPart1));
KeyPart1 = Convert.FromHexString(keyPart1);
if (keyPart2 != null)
KeyPart2 = Convert.FromHexString(keyPart2);
}
}
// master version of the interface
public interface IDownloadOptions
{
event EventHandler<long> DownloadSpeedChanged;
string DownloadUrl { get; }
string UserAgent { get; }
KeyData[]? DecryptionKeys { get; }
TimeSpan RuntimeLength { get; }
OutputFormat OutputFormat { get; }
bool StripUnabridged { get; }
bool CreateCueSheet { get; }
long DownloadSpeedBps { get; }
ChapterInfo ChapterInfo { get; }
bool FixupFile { get; }
string? AudibleProductId { get; }
string? Title { get; }
string? Subtitle { get; }
string? Publisher { get; }
string? Language { get; }
string? SeriesName { get; }
string? SeriesNumber { get; }
NAudio.Lame.LameConfig? LameConfig { get; }
bool Downsample { get; }
bool MatchSourceBitrate { get; }
bool MoveMoovToBeginning { get; }
string GetMultipartTitle(MultiConvertFileProperties props);
public FileType? InputType { get; }
}
// v8.5.1 version of the interface
public interface IDownloadOptions
{
FileManager.ReplacementCharacters ReplacementCharacters { get; }
string DownloadUrl { get; }
string UserAgent { get; }
string AudibleKey { get; }
string AudibleIV { get; }
OutputFormat OutputFormat { get; }
bool TrimOutputToChapterLength { get; }
bool RetainEncryptedFile { get; }
bool StripUnabridged { get; }
bool CreateCueSheet { get; }
ChapterInfo ChapterInfo { get; }
bool FixupFile { get; }
NAudio.Lame.LameConfig LameConfig { get; }
bool Downsample { get; }
bool MatchSourceBitrate { get; }
string GetMultipartFileName(MultiConvertFileProperties props);
string GetMultipartTitleName(MultiConvertFileProperties props);
}
}
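
Because the second KeyData constructor parses hex text via Convert.FromHexString, key material can be supplied either as raw bytes or as hex strings. A small illustration with made-up values (real keys and IVs come from the Audible license response):

// Hypothetical values; Convert.FromHexString requires an even number of hex digits.
var keyOnly  = new KeyData("0123456789ABCDEF");              // KeyPart2 remains null
var keyAndIv = new KeyData("0123456789ABCDEF", "FEDCBA98");
KeyData[] decryptionKeys = { keyOnly, keyAndIv };             // shape of IDownloadOptions.DecryptionKeys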

View File

@ -1,69 +1,33 @@
using AAXClean;
using AAXClean.Codecs;
using NAudio.Lame;
using System;
using System.Linq;
namespace AaxDecrypter
{
public static class MpegUtil
{
private const string TagDomain = "com.pilabor.tone";
// v8.5.1 signature: ConfigureLameOptions(Mp4File mp4File, LameConfig lameConfig, bool downsample, bool matchSourceBitrate)
public static void ConfigureLameOptions(
Mp4File mp4File,
LameConfig lameConfig,
bool downsample,
bool matchSourceBitrate,
ChapterInfo chapters)
{
double bitrateMultiple = 1;
if (mp4File.TimeScale < lameConfig.OutputSampleRate)
{
lameConfig.OutputSampleRate = mp4File.TimeScale;
}
else if (mp4File.TimeScale > lameConfig.OutputSampleRate)
{
bitrateMultiple *= (double)lameConfig.OutputSampleRate / mp4File.TimeScale;
}
if (mp4File.AudioChannels == 2)
{
if (downsample)
bitrateMultiple /= 2; // v8.5.1: bitrateMultiple = 0.5;
else
lameConfig.Mode = MPEGMode.Stereo;
}
if (matchSourceBitrate)
{
int kbps = (int)Math.Round(mp4File.AverageBitrate * bitrateMultiple / 1024); // v8.5.1: (int)(mp4File.AverageBitrate * bitrateMultiple / 1024)
if (lameConfig.VBR is null)
lameConfig.BitRate = kbps;
else if (lameConfig.VBR == VBRMode.ABR)
lameConfig.ABRRateKbps = kbps;
}
//Setup metadata tags
lameConfig.ID3 = mp4File.AppleTags.ToIDTags();
if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "SUBTITLE") is string subtitle)
lameConfig.ID3.Subtitle = subtitle;
if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "LANGUAGE") is string lang)
lameConfig.ID3.UserDefinedText.Add("LANGUAGE", lang);
if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "SERIES") is string series)
lameConfig.ID3.UserDefinedText.Add("SERIES", series);
if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "PART") is string part)
lameConfig.ID3.UserDefinedText.Add("PART", part);
if (chapters?.Count > 0)
{
var cue = Cue.CreateContents(lameConfig.ID3.Title + ".mp3", chapters);
lameConfig.ID3.UserDefinedText.Add("CUESHEET", cue);
}
}
}
}
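
To make the bitrate arithmetic concrete (illustrative numbers): for a stereo source whose TimeScale is 44100 and whose AverageBitrate is 262144 bits/s, requesting a 22050 Hz output scales bitrateMultiple by 22050/44100 = 0.5; with downsample enabled it is halved again to 0.25; MatchSourceBitrate then sets a CBR target of Math.Round(262144 * 0.25 / 1024) = 64 kbps.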

View File

@ -1,4 +1,6 @@
using System;
using System.IO;
using FileManager;
namespace AaxDecrypter
{
@ -8,6 +10,6 @@ namespace AaxDecrypter
public int PartsPosition { get; set; }
public int PartsTotal { get; set; }
public string Title { get; set; }
public DateTime FileDate { get; } = DateTime.Now;
}
}
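
As a small illustration of how these properties are typically populated when a book is split into parts (values are hypothetical; only the members shown above are used):

var props = new MultiConvertFileProperties
{
    PartsPosition = 2,
    PartsTotal = 10,
    Title = "Chapter 2"
};
// FileDate is captured as DateTime.Now when the instance is created, and
// IDownloadOptions.GetMultipartTitle(props) can format a per-part title from these values.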

View File

@ -1,111 +1,135 @@
using Dinah.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
namespace AaxDecrypter
{
/// <summary>A resumable, simultaneous file downloader and reader. </summary> /// <summary>
/// A <see cref="CookieContainer"/> for a single Uri.
/// </summary>
public class SingleUriCookieContainer : CookieContainer
{
private Uri baseAddress;
public Uri Uri
{
get => baseAddress;
set
{
baseAddress = new UriBuilder(value.Scheme, value.Host).Uri;
}
}
public CookieCollection GetCookies()
{
return GetCookies(Uri);
}
}
/// <summary>
/// A resumable, simultaneous file downloader and reader.
/// </summary>
public class NetworkFileStream : Stream, IUpdatable public class NetworkFileStream : Stream, IUpdatable
{ {
public event EventHandler Updated; public event EventHandler Updated;
#region Public Properties
/// <summary> Location to save the downloaded data. </summary> /// <summary>
/// Location to save the downloaded data.
/// </summary>
[JsonProperty(Required = Required.Always)] [JsonProperty(Required = Required.Always)]
public string SaveFilePath { get; } public string SaveFilePath { get; }
/// <summary> Http(s) address of the file to download. </summary> /// <summary>
/// Http(s) address of the file to download.
/// </summary>
[JsonProperty(Required = Required.Always)] [JsonProperty(Required = Required.Always)]
public Uri Uri { get; private set; } public Uri Uri { get; private set; }
/// <summary> Http headers to be sent to the server with the request. </summary> /// <summary>
/// All cookies set by caller or by the remote server.
/// </summary>
[JsonProperty(Required = Required.Always)] [JsonProperty(Required = Required.Always)]
public Dictionary<string, string> RequestHeaders { get; private set; } public SingleUriCookieContainer CookieContainer { get; }
/// <summary> The position in <see cref="SaveFilePath"/> that has been written and flushed to disk. </summary> /// <summary>
/// Http headers to be sent to the server with the request.
/// </summary>
[JsonProperty(Required = Required.Always)]
public WebHeaderCollection RequestHeaders { get; private set; }
/// <summary>
/// The position in <see cref="SaveFilePath"/> that has been written and flushed to disk.
/// </summary>
[JsonProperty(Required = Required.Always)] [JsonProperty(Required = Required.Always)]
public long WritePosition { get; private set; } public long WritePosition { get; private set; }
/// <summary> The total length of the <see cref="Uri"/> file to download. </summary> /// <summary>
/// The total length of the <see cref="Uri"/> file to download.
/// </summary>
[JsonProperty(Required = Required.Always)] [JsonProperty(Required = Required.Always)]
public long ContentLength { get; private set; } public long ContentLength { get; private set; }
[JsonIgnore]
public bool IsCancelled => _cancellationSource.IsCancellationRequested;
[JsonIgnore]
public Task DownloadTask { get; private set; }
private long _speedLimit = 0;
/// <summary>bytes per second</summary>
public long SpeedLimit { get => _speedLimit; set => _speedLimit = value <= 0 ? 0 : Math.Max(value, MIN_BYTES_PER_SECOND); }
#endregion
#region Private Properties
private HttpWebRequest HttpRequest { get; set; }
private FileStream _writeFile { get; } private FileStream _writeFile { get; }
private FileStream _readFile { get; } private FileStream _readFile { get; }
private CancellationTokenSource _cancellationSource { get; } = new(); private Stream _networkStream { get; set; }
private EventWaitHandle _downloadedPiece { get; set; } private bool hasBegunDownloading { get; set; }
public bool IsCancelled { get; private set; }
private DateTime NextUpdateTime { get; set; } private EventWaitHandle downloadEnded { get; set; }
private EventWaitHandle downloadedPiece { get; set; }
#endregion
#region Constants
//Download memory buffer size
private const int DOWNLOAD_BUFF_SZ = 8 * 1024; // v8.5.1: 32 * 1024
//NetworkFileStream will flush all data in _writeFile to disk after every
//DATA_FLUSH_SZ bytes are written to the file stream.
private const int DATA_FLUSH_SZ = 1024 * 1024;
//Number of times per second the download rate is checked and throttled
private const int THROTTLE_FREQUENCY = 8;
//Minimum throttle rate. The minimum amount of data that can be throttled
//on each iteration of the download loop is DOWNLOAD_BUFF_SZ.
public const int MIN_BYTES_PER_SECOND = DOWNLOAD_BUFF_SZ * THROTTLE_FREQUENCY;
#endregion
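// To illustrate the throttle constants above, here is a simplified, hypothetical sketch
// (not the actual download loop): the count of bytes read is checked THROTTLE_FREQUENCY
// times per second, and once the per-slice quota (speedLimit / THROTTLE_FREQUENCY) is
// consumed the loop sleeps away the remainder of that 1/THROTTLE_FREQUENCY-second slice.
// Because at most one DOWNLOAD_BUFF_SZ read happens per check, the lowest enforceable
// rate is DOWNLOAD_BUFF_SZ * THROTTLE_FREQUENCY = MIN_BYTES_PER_SECOND.
//
//   var sliceStart = DateTime.UtcNow;
//   long bytesThisSlice = 0;
//   while (moreDataToRead)
//   {
//       int read = await source.ReadAsync(buffer);               // at most DOWNLOAD_BUFF_SZ bytes
//       await destination.WriteAsync(buffer.AsMemory(0, read));
//       bytesThisSlice += read;
//       if (speedLimit >= MIN_BYTES_PER_SECOND && bytesThisSlice > speedLimit / THROTTLE_FREQUENCY)
//       {
//           var wait = sliceStart.AddSeconds(1.0 / THROTTLE_FREQUENCY) - DateTime.UtcNow;
//           if (wait > TimeSpan.Zero) await Task.Delay(wait);
//           sliceStart = DateTime.UtcNow;
//           bytesThisSlice = 0;
//       }
//   }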
#region Constructor
/// <summary> A resumable, simultaneous file downloader and reader. </summary> /// <summary>
/// A resumable, simultaneous file downloader and reader.
/// </summary>
/// <param name="saveFilePath">Path to a location on disk to save the downloaded data from <paramref name="uri"/></param> /// <param name="saveFilePath">Path to a location on disk to save the downloaded data from <paramref name="uri"/></param>
/// <param name="uri">Http(s) address of the file to download.</param> /// <param name="uri">Http(s) address of the file to download.</param>
/// <param name="writePosition">The position in <paramref name="uri"/> to begin downloading.</param> /// <param name="writePosition">The position in <paramref name="uri"/> to begin downloading.</param>
/// <param name="requestHeaders">Http headers to be sent to the server with the <see cref="HttpWebRequest"/>.</param> /// <param name="requestHeaders">Http headers to be sent to the server with the <see cref="HttpWebRequest"/>.</param>
public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, Dictionary<string, string> requestHeaders = null) /// <param name="cookies">A <see cref="SingleUriCookieContainer"/> with cookies to send with the <see cref="HttpWebRequest"/>. It will also be populated with any cookies set by the server. </param>
public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, WebHeaderCollection requestHeaders = null, SingleUriCookieContainer cookies = null)
{ {
SaveFilePath = ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath)); ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath));
Uri = ArgumentValidator.EnsureNotNull(uri, nameof(uri)); ArgumentValidator.EnsureNotNullOrWhiteSpace(uri?.AbsoluteUri, nameof(uri));
WritePosition = ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1); ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1);
if (!Directory.Exists(Path.GetDirectoryName(saveFilePath))) if (!Directory.Exists(Path.GetDirectoryName(saveFilePath)))
throw new ArgumentException($"Specified {nameof(saveFilePath)} directory \"{Path.GetDirectoryName(saveFilePath)}\" does not exist."); throw new ArgumentException($"Specified {nameof(saveFilePath)} directory \"{Path.GetDirectoryName(saveFilePath)}\" does not exist.");
RequestHeaders = requestHeaders ?? new(); SaveFilePath = saveFilePath;
Uri = uri;
WritePosition = writePosition;
RequestHeaders = requestHeaders ?? new WebHeaderCollection();
CookieContainer = cookies ?? new SingleUriCookieContainer { Uri = uri };
_writeFile = new FileStream(SaveFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite) _writeFile = new FileStream(SaveFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite)
{ {
Position = WritePosition Position = WritePosition
}; };
if (_writeFile.Length < WritePosition)
{
_writeFile.Dispose();
throw new InvalidDataException($"{SaveFilePath} file length is shorter than {WritePosition}");
}
_readFile = new FileStream(SaveFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); _readFile = new FileStream(SaveFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
SetUriForSameFile(uri); SetUriForSameFile(uri);
@ -115,19 +139,15 @@ namespace AaxDecrypter
#region Downloader
/// <summary> Update the <see cref="Dinah.Core.IO.JsonFilePersister{T}"/>. </summary> /// <summary>
private void OnUpdate(bool waitForWrite = false) /// Update the <see cref="JsonFilePersister"/>.
/// </summary>
private void Update()
{ {
RequestHeaders = HttpRequest.Headers;
try try
{
if (waitForWrite || DateTime.UtcNow > NextUpdateTime)
{ {
Updated?.Invoke(this, EventArgs.Empty); Updated?.Invoke(this, EventArgs.Empty);
//JsonFilePersister Will not allow update intervals shorter than 100 milliseconds
//If an update is called less than 100 ms since the last update, persister will
//sleep the thread until 100 ms has elapsed.
NextUpdateTime = DateTime.UtcNow.AddMilliseconds(110);
}
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -135,185 +155,213 @@ namespace AaxDecrypter
} }
} }
/// <summary> Set a different <see cref="System.Uri"/> to the same file targeted by this instance of <see cref="NetworkFileStream"/> </summary> /// <summary>
/// Set a different <see cref="System.Uri"/> to the same file targeted by this instance of <see cref="NetworkFileStream"/>
/// </summary>
/// <param name="uriToSameFile">New <see cref="System.Uri"/> host must match existing host.</param> /// <param name="uriToSameFile">New <see cref="System.Uri"/> host must match existing host.</param>
public void SetUriForSameFile(Uri uriToSameFile) public void SetUriForSameFile(Uri uriToSameFile)
{ {
ArgumentValidator.EnsureNotNullOrWhiteSpace(uriToSameFile?.AbsoluteUri, nameof(uriToSameFile)); ArgumentValidator.EnsureNotNullOrWhiteSpace(uriToSameFile?.AbsoluteUri, nameof(uriToSameFile));
if (Path.GetFileName(uriToSameFile.LocalPath) != Path.GetFileName(Uri.LocalPath))
throw new ArgumentException($"New uri to the same file must have the same file name.");
if (uriToSameFile.Host != Uri.Host) if (uriToSameFile.Host != Uri.Host)
throw new ArgumentException($"New uri to the same file must have the same host.\r\n Old Host :{Uri.Host}\r\nNew Host: {uriToSameFile.Host}"); throw new ArgumentException($"New uri to the same file must have the same host.\r\n Old Host :{Uri.Host}\r\nNew Host: {uriToSameFile.Host}");
if (DownloadTask is not null) if (hasBegunDownloading)
throw new InvalidOperationException("Cannot change Uri after download has started."); throw new InvalidOperationException("Cannot change Uri after download has started.");
Uri = uriToSameFile; Uri = uriToSameFile;
HttpRequest = WebRequest.CreateHttp(Uri);
HttpRequest.CookieContainer = CookieContainer;
HttpRequest.Headers = RequestHeaders;
//If NetworkFileStream is resuming, Header will already contain a range.
HttpRequest.Headers.Remove("Range");
HttpRequest.AddRange(WritePosition);
} }
/// <summary> Begins downloading <see cref="Uri"/> to <see cref="SaveFilePath"/> in a background thread. </summary> /// <summary>
/// <returns>The downloader <see cref="Task"/></returns> /// Begins downloading <see cref="Uri"/> to <see cref="SaveFilePath"/> in a background thread.
public async Task BeginDownloadingAsync() /// </summary>
private void BeginDownloading()
{ {
downloadEnded = new EventWaitHandle(false, EventResetMode.ManualReset);
if (ContentLength != 0 && WritePosition == ContentLength) if (ContentLength != 0 && WritePosition == ContentLength)
{ {
DownloadTask = Task.CompletedTask; hasBegunDownloading = true;
downloadEnded.Set();
return; return;
} }
if (ContentLength != 0 && WritePosition > ContentLength) if (ContentLength != 0 && WritePosition > ContentLength)
throw new WebException($"Specified write position (0x{WritePosition:X10}) is larger than {nameof(ContentLength)} (0x{ContentLength:X10})."); throw new WebException($"Specified write position (0x{WritePosition:X10}) is larger than {nameof(ContentLength)} (0x{ContentLength:X10}).");
//Initiate connection with the first request block and var response = HttpRequest.GetResponse() as HttpWebResponse;
//get the total content length before returning.
var client = new HttpClient();
var response = await RequestNextByteRangeAsync(client);
if (ContentLength != 0 && ContentLength != response.FileSize)
throw new WebException($"Content length of 0x{response.FileSize:X10} differs from partially downloaded content length of 0x{ContentLength:X10}");
ContentLength = response.FileSize;
_downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
//Hand off the client and the open request to the downloader to download and write data to file.
DownloadTask = Task.Run(() => DownloadLoopInternal(client , response), _cancellationSource.Token);
}
private async Task DownloadLoopInternal(HttpClient client, BlockResponse blockResponse)
{
try
{
long startPosition = WritePosition;
while (WritePosition < ContentLength && !IsCancelled)
{
try
{
await DownloadToFile(blockResponse);
}
catch (HttpIOException e)
when (e.HttpRequestError is HttpRequestError.ResponseEnded
&& WritePosition != startPosition
&& WritePosition < ContentLength && !IsCancelled)
{
Serilog.Log.Logger.Debug($"The download connection ended before the file completed downloading all 0x{ContentLength:X10} bytes");
//the download made *some* progress since the last attempt.
//Try again to complete the download from where it left off.
//Make sure to rewind file to last flush position.
_writeFile.Position = startPosition = WritePosition;
blockResponse.Dispose();
blockResponse = await RequestNextByteRangeAsync(client);
Serilog.Log.Logger.Debug($"Resuming the file download starting at position 0x{WritePosition:X10}.");
}
}
}
finally
{
_writeFile.Dispose();
blockResponse.Dispose();
client.Dispose();
}
}
private async Task<BlockResponse> RequestNextByteRangeAsync(HttpClient client)
{
using var request = new HttpRequestMessage(HttpMethod.Get, Uri);
//Just in case it snuck in the saved json (Issue #1232)
RequestHeaders.Remove("Range");
foreach (var header in RequestHeaders)
request.Headers.Add(header.Key, header.Value);
request.Headers.Add("Range", $"bytes={WritePosition}-");
var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, _cancellationSource.Token);
if (response.StatusCode != HttpStatusCode.PartialContent) if (response.StatusCode != HttpStatusCode.PartialContent)
throw new WebException($"Server at {Uri.Host} responded with unexpected status code: {response.StatusCode}."); throw new WebException($"Server at {Uri.Host} responded with unexpected status code: {response.StatusCode}.");
var totalSize = response.Content.Headers.ContentRange?.Length ?? //Content length is the length of the range request, and it is only equal
throw new WebException("The response did not contain a total content length."); //to the complete file length if requesting Range: bytes=0-
if (WritePosition == 0)
ContentLength = response.ContentLength;
var rangeSize = response.Content.Headers.ContentLength ?? _networkStream = response.GetResponseStream();
throw new WebException($"The response did not contain a {nameof(response.Content.Headers.ContentLength)};"); downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
return new BlockResponse(response, rangeSize, totalSize); //Download the file in the background.
new Thread(() => DownloadFile())
{ IsBackground = true }
.Start();
hasBegunDownloading = true;
return;
} }
private readonly record struct BlockResponse(HttpResponseMessage Response, long BlockSize, long FileSize) : IDisposable /// <summary>
/// Download <see cref="Uri"/> to <see cref="SaveFilePath"/>.
/// </summary>
private void DownloadFile()
{ {
public void Dispose() => Response?.Dispose();
}
/// <summary> Download <see cref="Uri"/> to <see cref="SaveFilePath"/>.</summary>
private async Task DownloadToFile(BlockResponse block)
{
var endPosition = WritePosition + block.BlockSize;
using var networkStream = await block.Response.Content.ReadAsStreamAsync(_cancellationSource.Token);
var downloadPosition = WritePosition; var downloadPosition = WritePosition;
var nextFlush = downloadPosition + DATA_FLUSH_SZ; var nextFlush = downloadPosition + DATA_FLUSH_SZ;
var buff = new byte[DOWNLOAD_BUFF_SZ]; var buff = new byte[DOWNLOAD_BUFF_SZ];
try try
{ {
DateTime startTime = DateTime.UtcNow;
long bytesReadSinceThrottle = 0;
int bytesRead; int bytesRead;
do do
{ {
bytesRead = await networkStream.ReadAsync(buff, _cancellationSource.Token); bytesRead = _networkStream.Read(buff, 0, DOWNLOAD_BUFF_SZ);
await _writeFile.WriteAsync(buff, 0, bytesRead, _cancellationSource.Token); _writeFile.Write(buff, 0, bytesRead);
downloadPosition += bytesRead; downloadPosition += bytesRead;
if (downloadPosition > nextFlush) if (downloadPosition > nextFlush)
{ {
await _writeFile.FlushAsync(_cancellationSource.Token); _writeFile.Flush();
WritePosition = downloadPosition; WritePosition = downloadPosition;
OnUpdate(); Update();
nextFlush = downloadPosition + DATA_FLUSH_SZ; nextFlush = downloadPosition + DATA_FLUSH_SZ;
_downloadedPiece.Set(); downloadedPiece.Set();
} }
#region throttle } while (downloadPosition < ContentLength && !IsCancelled && bytesRead > 0);
bytesReadSinceThrottle += bytesRead; _writeFile.Close();
_networkStream.Close();
WritePosition = downloadPosition;
Update();
if (SpeedLimit >= MIN_BYTES_PER_SECOND && bytesReadSinceThrottle > SpeedLimit / THROTTLE_FREQUENCY) if (!IsCancelled && WritePosition < ContentLength)
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
if (WritePosition > ContentLength)
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
}
catch (Exception ex)
{ {
var delayMS = (int)(startTime.AddSeconds(1d / THROTTLE_FREQUENCY) - DateTime.UtcNow).TotalMilliseconds; Serilog.Log.Error(ex, "An error was encountered while downloading {Uri}", Uri);
if (delayMS > 0) }
await Task.Delay(delayMS, _cancellationSource.Token); finally
{
startTime = DateTime.UtcNow; downloadedPiece.Set();
bytesReadSinceThrottle = 0; downloadEnded.Set();
}
} }
#endregion #endregion
} while (downloadPosition < endPosition && !IsCancelled && bytesRead > 0); #region Json Converters
await _writeFile.FlushAsync(_cancellationSource.Token); public static JsonSerializerSettings GetJsonSerializerSettings()
WritePosition = downloadPosition;
if (!IsCancelled && WritePosition < endPosition)
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
if (WritePosition > endPosition)
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
}
catch (TaskCanceledException)
{ {
Serilog.Log.Information("Download was cancelled"); var settings = new JsonSerializerSettings();
settings.Converters.Add(new CookieContainerConverter());
settings.Converters.Add(new WebHeaderCollectionConverter());
return settings;
} }
finally
internal class CookieContainerConverter : JsonConverter
{ {
_downloadedPiece.Set(); public override bool CanConvert(Type objectType)
OnUpdate(waitForWrite: true); => objectType == typeof(SingleUriCookieContainer);
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
var jObj = JObject.Load(reader);
var result = new SingleUriCookieContainer()
{
Uri = new Uri(jObj["Uri"].Value<string>()),
Capacity = jObj["Capacity"].Value<int>(),
MaxCookieSize = jObj["MaxCookieSize"].Value<int>(),
PerDomainCapacity = jObj["PerDomainCapacity"].Value<int>()
};
var cookieList = jObj["Cookies"].ToList();
foreach (var cookie in cookieList)
{
result.Add(
new Cookie
{
Comment = cookie["Comment"].Value<string>(),
HttpOnly = cookie["HttpOnly"].Value<bool>(),
Discard = cookie["Discard"].Value<bool>(),
Domain = cookie["Domain"].Value<string>(),
Expired = cookie["Expired"].Value<bool>(),
Expires = cookie["Expires"].Value<DateTime>(),
Name = cookie["Name"].Value<string>(),
Path = cookie["Path"].Value<string>(),
Port = cookie["Port"].Value<string>(),
Secure = cookie["Secure"].Value<bool>(),
Value = cookie["Value"].Value<string>(),
Version = cookie["Version"].Value<int>(),
});
}
return result;
}
public override bool CanWrite => true;
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var cookies = value as SingleUriCookieContainer;
var obj = (JObject)JToken.FromObject(value);
var container = cookies.GetCookies();
var propertyNames = container.Select(c => JToken.FromObject(c));
obj.AddFirst(new JProperty("Cookies", new JArray(propertyNames)));
obj.WriteTo(writer);
}
}
internal class WebHeaderCollectionConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
=> objectType == typeof(WebHeaderCollection);
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
var jObj = JObject.Load(reader);
var result = new WebHeaderCollection();
foreach (var kvp in jObj)
result.Add(kvp.Key, kvp.Value.Value<string>());
return result;
}
public override bool CanWrite => true;
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var jObj = new JObject();
var type = value.GetType();
var headers = value as WebHeaderCollection;
var jHeaders = headers.AllKeys.Select(k => new JProperty(k, headers[k]));
jObj.Add(jHeaders);
jObj.WriteTo(writer);
} }
} }
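Worth spelling out: the throttle region earlier in this file paces the transfer by counting bytes since the last checkpoint and, whenever that count exceeds the per-window budget (SpeedLimit / THROTTLE_FREQUENCY), sleeping until the next window begins. A stripped-down sketch of that pacing loop, using illustrative constants rather than Libation's:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

static class ThrottledCopySketch
{
	// Copy source to destination while holding the average rate at or below
	// bytesPerSecond, re-checking the budget several times per second.
	public static async Task CopyAsync(Stream source, Stream destination, long bytesPerSecond,
		int checksPerSecond = 8, CancellationToken cancellationToken = default)
	{
		var budgetPerWindow = bytesPerSecond / checksPerSecond;
		var windowStart = DateTime.UtcNow;
		long bytesThisWindow = 0;
		var buffer = new byte[64 * 1024];

		int read;
		while ((read = await source.ReadAsync(buffer, cancellationToken)) > 0)
		{
			await destination.WriteAsync(buffer.AsMemory(0, read), cancellationToken);
			bytesThisWindow += read;

			if (bytesThisWindow > budgetPerWindow)
			{
				// Budget spent: wait out the remainder of the current window, then reset the counter.
				var delay = windowStart.AddSeconds(1d / checksPerSecond) - DateTime.UtcNow;
				if (delay > TimeSpan.Zero)
					await Task.Delay(delay, cancellationToken);

				windowStart = DateTime.UtcNow;
				bytesThisWindow = 0;
			}
		}
	}
}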
@ -322,10 +370,10 @@ namespace AaxDecrypter
#region Download Stream Reader #region Download Stream Reader
[JsonIgnore] [JsonIgnore]
public override bool CanRead => _readFile.CanRead; public override bool CanRead => true;
[JsonIgnore] [JsonIgnore]
public override bool CanSeek => _readFile.CanSeek; public override bool CanSeek => true;
[JsonIgnore] [JsonIgnore]
public override bool CanWrite => false; public override bool CanWrite => false;
@ -335,8 +383,8 @@ namespace AaxDecrypter
{ {
get get
{ {
if (DownloadTask is null) if (!hasBegunDownloading)
throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}"); BeginDownloading();
return ContentLength; return ContentLength;
} }
} }
@ -353,18 +401,18 @@ namespace AaxDecrypter
[JsonIgnore] [JsonIgnore]
public override int WriteTimeout { get => base.WriteTimeout; set => base.WriteTimeout = value; } public override int WriteTimeout { get => base.WriteTimeout; set => base.WriteTimeout = value; }
public override void Flush() => throw new InvalidOperationException(); public override void Flush() => throw new NotImplementedException();
public override void SetLength(long value) => throw new InvalidOperationException(); public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count) => throw new InvalidOperationException(); public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();
public override int Read(byte[] buffer, int offset, int count) public override int Read(byte[] buffer, int offset, int count)
{ {
if (DownloadTask is null) if (!hasBegunDownloading)
throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}"); BeginDownloading();
var toRead = Math.Min(count, Length - Position); var toRead = Math.Min(count, Length - Position);
WaitToPosition(Position + toRead); WaitToPosition(Position + toRead);
return IsCancelled ? 0 : _readFile.Read(buffer, offset, count); return _readFile.Read(buffer, offset, count);
} }
public override long Seek(long offset, SeekOrigin origin) public override long Seek(long offset, SeekOrigin origin)
@ -380,43 +428,38 @@ namespace AaxDecrypter
return _readFile.Position = newPosition; return _readFile.Position = newPosition;
} }
/// <summary>Blocks until the file has downloaded to at least <paramref name="requiredPosition"/>, then returns. </summary> /// <summary>
/// <param name="requiredPosition">The minimum required flushed data length in <see cref="SaveFilePath"/>.</param> /// Blocks until the file has downloaded to at least <paramref name="requiredPosition"/>, then returns.
/// </summary>
/// <param name="requiredPosition">The minimum required flushed data length in <see cref="SaveFilePath"/>.</param>
private void WaitToPosition(long requiredPosition) private void WaitToPosition(long requiredPosition)
{ {
while (WritePosition < requiredPosition while (WritePosition < requiredPosition
&& DownloadTask?.IsCompleted is false && hasBegunDownloading
&& !IsCancelled) && !IsCancelled
&& !downloadEnded.WaitOne(0))
{ {
_downloadedPiece.WaitOne(50); downloadedPiece.WaitOne(100);
} }
} }
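The read path above is a plain producer/consumer handoff: the downloader flushes, advances WritePosition, and pulses an auto-reset event, while readers loop on a short wait until enough bytes have been flushed. A self-contained sketch of the same pattern with placeholder types (not the project's):

using System.Threading;

class FlushSignalSketch
{
	private readonly EventWaitHandle _flushed = new(false, EventResetMode.AutoReset);
	private long _flushedLength;

	// Producer: record how far the file has been flushed and wake any waiting reader.
	public void Publish(long flushedLength)
	{
		Interlocked.Exchange(ref _flushedLength, flushedLength);
		_flushed.Set();
	}

	// Consumer: block until at least requiredPosition bytes have been flushed.
	// The short timeout means a Set() raced between checks can never hang the reader.
	public void WaitToPosition(long requiredPosition)
	{
		while (Interlocked.Read(ref _flushedLength) < requiredPosition)
			_flushed.WaitOne(50);
	}
}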
private bool disposed = false; public override void Close()
/*
* https://learn.microsoft.com/en-us/dotnet/api/system.io.stream.dispose?view=net-7.0
*
* In derived classes, do not override the Close() method, instead, put all of the
* Stream cleanup logic in the Dispose(Boolean) method.
*/
protected override void Dispose(bool disposing)
{ {
if (disposing && !disposed) IsCancelled = true;
{
_cancellationSource.Cancel();
DownloadTask?.GetAwaiter().GetResult();
_downloadedPiece?.Dispose();
_cancellationSource?.Dispose();
_readFile.Dispose();
_writeFile.Dispose();
OnUpdate(waitForWrite: true);
}
disposed = true; while (downloadEnded is not null && !downloadEnded.WaitOne(100)) ;
base.Dispose(disposing);
_readFile.Close();
_writeFile.Close();
_networkStream?.Close();
Update();
} }
#endregion #endregion
~NetworkFileStream()
{
downloadEnded?.Close();
downloadedPiece?.Close();
}
} }
} }

View File

@ -1,9 +1,11 @@
using Dinah.Core.IO; using Dinah.Core.IO;
using Newtonsoft.Json;
namespace AaxDecrypter namespace AaxDecrypter
{ {
internal class NetworkFileStreamPersister : JsonFilePersister<NetworkFileStream> internal class NetworkFileStreamPersister : JsonFilePersister<NetworkFileStream>
{ {
/// <summary>Alias for Target </summary> /// <summary>Alias for Target </summary>
public NetworkFileStream NetworkFileStream => Target; public NetworkFileStream NetworkFileStream => Target;
@ -15,11 +17,7 @@ namespace AaxDecrypter
public NetworkFileStreamPersister(string path, string jsonPath = null) public NetworkFileStreamPersister(string path, string jsonPath = null)
: base(path, jsonPath) { } : base(path, jsonPath) { }
protected override void Dispose(bool disposing) protected override JsonSerializerSettings GetSerializerSettings() => NetworkFileStream.GetJsonSerializerSettings();
{
if (disposing)
NetworkFileStream?.Dispose();
base.Dispose(disposing);
}
} }
} }

View File

@ -1,17 +0,0 @@
using FileManager;
#nullable enable
namespace AaxDecrypter;
public record TempFile
{
public LongPath FilePath { get; init; }
public string Extension { get; }
public MultiConvertFileProperties? PartProperties { get; init; }
public TempFile(LongPath filePath, string? extension = null)
{
FilePath = filePath;
extension ??= System.IO.Path.GetExtension(filePath);
Extension = FileUtility.GetStandardizedExtension(extension).ToLowerInvariant();
}
}

View File

@ -1,34 +1,112 @@
using FileManager; using System;
using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using Dinah.Core.Net.Http;
using FileManager;
namespace AaxDecrypter namespace AaxDecrypter
{ {
public class UnencryptedAudiobookDownloader : AudiobookDownloadBase public class UnencryptedAudiobookDownloader : AudiobookDownloadBase
{ {
protected override long InputFilePosition => InputFileStream.WritePosition;
public UnencryptedAudiobookDownloader(string outDirectory, string cacheDirectory, IDownloadOptions dlLic) public UnencryptedAudiobookDownloader(string outFileName, string cacheDirectory, IDownloadOptions dlLic)
: base(outDirectory, cacheDirectory, dlLic) : base(outFileName, cacheDirectory, dlLic) { }
public override async Task<bool> RunAsync()
{ {
AsyncSteps.Name = "Download Unencrypted Audiobook"; try
AsyncSteps["Step 1: Download Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
AsyncSteps["Step 2: Create Cue"] = Step_CreateCueAsync;
}
protected override async Task<bool> Step_DownloadAndDecryptAudiobookAsync()
{ {
await InputFileStream.DownloadTask; Serilog.Log.Information("Begin download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
if (IsCanceled) //Step 1
return false; Serilog.Log.Information("Begin Step 1: Get Mp3 Metadata");
if (await Task.Run(Step_GetMetadata))
Serilog.Log.Information("Completed Step 1: Get Mp3 Metadata");
else else
{ {
FinalizeDownload(); Serilog.Log.Information("Failed to Complete Step 1: Get Mp3 Metadata");
var tempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString()); return false;
FileUtility.SaferMove(InputFileStream.SaveFilePath, tempFile.FilePath); }
OnTempFileCreated(tempFile);
//Step 2
Serilog.Log.Information("Begin Step 2: Download Audiobook");
if (await Task.Run(Step_DownloadAudiobookAsSingleFile))
Serilog.Log.Information("Completed Step 2: Download Audiobook");
else
{
Serilog.Log.Information("Failed to Complete Step 2: Download Audiobook");
return false;
}
//Step 3
Serilog.Log.Information("Begin Step 3: Cleanup");
if (await Task.Run(Step_Cleanup))
Serilog.Log.Information("Completed Step 3: Cleanup");
else
{
Serilog.Log.Information("Failed to Complete Step 3: Cleanup");
return false;
}
Serilog.Log.Information("Completed download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
return true; return true;
} }
catch (Exception ex)
{
Serilog.Log.Error(ex, "Error encountered in download and convert Aaxc To {format}", DownloadOptions.OutputFormat);
return false;
}
}
public override Task CancelAsync()
{
IsCanceled = true;
CloseInputFileStream();
return Task.CompletedTask;
}
protected bool Step_GetMetadata()
{
OnRetrievedCoverArt(null);
return !IsCanceled;
}
private bool Step_DownloadAudiobookAsSingleFile()
{
DateTime startTime = DateTime.Now;
// MUST put InputFileStream.Length first, because it starts background downloader.
while (InputFileStream.Length > InputFileStream.WritePosition && !InputFileStream.IsCancelled)
{
var rate = InputFileStream.WritePosition / (DateTime.Now - startTime).TotalSeconds;
var estTimeRemaining = (InputFileStream.Length - InputFileStream.WritePosition) / rate;
if (double.IsNormal(estTimeRemaining))
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
var progressPercent = (double)InputFileStream.WritePosition / InputFileStream.Length;
OnDecryptProgressUpdate(
new DownloadProgress
{
ProgressPercentage = 100 * progressPercent,
BytesReceived = (long)(InputFileStream.Length * progressPercent),
TotalBytesToReceive = InputFileStream.Length
});
Thread.Sleep(200);
}
CloseInputFileStream();
var realOutputFileName = FileUtility.SaferMoveToValidPath(InputFileStream.SaveFilePath, OutputFileName, DownloadOptions.ReplacementCharacters);
SetOutputFileName(realOutputFileName);
OnFileCreated(realOutputFileName);
return !IsCanceled;
} }
} }
} }
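The progress loop above derives its estimate from average throughput: rate = bytes written / elapsed seconds, and time remaining = bytes left / rate, guarded by double.IsNormal so a zero or not-yet-meaningful rate never produces a bogus ETA. A tiny sketch of that arithmetic (the numbers in the comment are hypothetical):

using System;

static class EtaSketch
{
	// e.g. 150 MB written in 30 s gives 5 MB/s; with 300 MB left, roughly 60 s remain.
	public static TimeSpan? Estimate(long bytesWritten, long totalBytes, TimeSpan elapsed)
	{
		var rate = bytesWritten / elapsed.TotalSeconds;            // bytes per second
		var secondsRemaining = (totalBytes - bytesWritten) / rate; // bytes / (bytes per second)
		return double.IsNormal(secondsRemaining)
			? TimeSpan.FromSeconds(secondsRemaining)
			: (TimeSpan?)null;
	}
}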

View File

@ -1,15 +1,11 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net9.0</TargetFramework> <TargetFramework>net6.0</TargetFramework>
<Version>12.5.3.1</Version> <Version>8.5.1.1</Version>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Octokit" Version="14.0.0" /> <PackageReference Include="Octokit" Version="4.0.1" />
<!-- Do not remove unused Serilog.Sinks -->
<!-- Only File sink is currently used. By user request (June 2024) other packages are included for experimental use. -->
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>
<ProjectReference Include="..\ApplicationServices\ApplicationServices.csproj" /> <ProjectReference Include="..\ApplicationServices\ApplicationServices.csproj" />

View File

@ -1,48 +1,42 @@
using ApplicationServices; using System;
using AudibleUtilities;
using Dinah.Core.IO;
using Dinah.Core.Logging;
using LibationFileManager;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Serilog;
using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Reflection; using System.Reflection;
using System.Runtime.InteropServices; using ApplicationServices;
using AudibleUtilities;
using Dinah.Core.Collections.Generic;
using Dinah.Core.IO;
using Dinah.Core.Logging;
using LibationFileManager;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json.Linq;
using Serilog;
namespace AppScaffolding namespace AppScaffolding
{ {
public enum ReleaseIdentifier public enum ReleaseIdentifier
{ {
None, None,
WindowsClassic = OS.Windows | Variety.Classic | Architecture.X64, WindowsClassic,
WindowsAvalonia = OS.Windows | Variety.Chardonnay | Architecture.X64, WindowsAvalonia,
LinuxAvalonia = OS.Linux | Variety.Chardonnay | Architecture.X64, LinuxAvalonia,
MacOSAvalonia = OS.MacOS | Variety.Chardonnay | Architecture.X64, MacOSAvalonia
LinuxAvalonia_Arm64 = OS.Linux | Variety.Chardonnay | Architecture.Arm64,
MacOSAvalonia_Arm64 = OS.MacOS | Variety.Chardonnay | Architecture.Arm64
} }
// I know I'm taking the wine metaphor a bit far by naming this "Variety", but I don't know what else to call it // I know I'm taking the wine metaphor a bit far by naming this "Variety", but I don't know what else to call it
[Flags] public enum VarietyType { None, Classic, Chardonnay }
public enum Variety
{
None,
Classic = 0x10000,
Chardonnay = 0x20000,
}
public static class LibationScaffolding public static class LibationScaffolding
{ {
public const string RepositoryUrl = "ht" + "tps://github.com/rmcrackan/Libation";
public const string WebsiteUrl = "ht" + "tps://getlibation.com";
public const string RepositoryLatestUrl = "ht" + "tps://github.com/rmcrackan/Libation/releases/latest";
public static ReleaseIdentifier ReleaseIdentifier { get; private set; } public static ReleaseIdentifier ReleaseIdentifier { get; private set; }
public static Variety Variety { get; private set; } public static VarietyType Variety
=> ReleaseIdentifier == ReleaseIdentifier.WindowsClassic ? VarietyType.Classic
: ReleaseIdentifier.In(ReleaseIdentifier.WindowsAvalonia, ReleaseIdentifier.LinuxAvalonia, ReleaseIdentifier.MacOSAvalonia) ? VarietyType.Chardonnay
: VarietyType.None;
public static void SetReleaseIdentifier(ReleaseIdentifier releaseID)
=> ReleaseIdentifier = releaseID;
// AppScaffolding // AppScaffolding
private static Assembly _executingAssembly; private static Assembly _executingAssembly;
@ -68,8 +62,6 @@ namespace AppScaffolding
// // outdated. kept here as an example of what belongs in this area // // outdated. kept here as an example of what belongs in this area
// // Migrations.migrate_to_v5_2_0__pre_config(); // // Migrations.migrate_to_v5_2_0__pre_config();
Configuration.SetLibationVersion(BuildVersion);
//***********************************************// //***********************************************//
// // // //
// do not use Configuration before this line // // do not use Configuration before this line //
@ -82,26 +74,114 @@ namespace AppScaffolding
public static void RunPostConfigMigrations(Configuration config) public static void RunPostConfigMigrations(Configuration config)
{ {
AudibleApiStorage.EnsureAccountsSettingsFileExists(); AudibleApiStorage.EnsureAccountsSettingsFileExists();
PopulateMissingConfigValues(config);
// //
// migrations go below here // migrations go below here
// //
Migrations.migrate_to_v6_6_9(config); Migrations.migrate_to_v6_6_9(config);
Migrations.migrate_to_v11_5_0(config); Migrations.migrate_from_7_10_1(config);
Migrations.migrate_to_v11_6_5(config); }
Migrations.migrate_to_v12_0_1(config);
public static void PopulateMissingConfigValues(Configuration config)
{
config.InProgress ??= Configuration.WinTemp;
if (!config.Exists(nameof(config.UseCoverAsFolderIcon)))
config.UseCoverAsFolderIcon = false;
if (!config.Exists(nameof(config.BetaOptIn)))
config.BetaOptIn = false;
if (!config.Exists(nameof(config.AllowLibationFixup)))
config.AllowLibationFixup = true;
if (!config.Exists(nameof(config.CreateCueSheet)))
config.CreateCueSheet = true;
if (!config.Exists(nameof(config.RetainAaxFile)))
config.RetainAaxFile = false;
if (!config.Exists(nameof(config.SplitFilesByChapter)))
config.SplitFilesByChapter = false;
if (!config.Exists(nameof(config.StripUnabridged)))
config.StripUnabridged = false;
if (!config.Exists(nameof(config.StripAudibleBrandAudio)))
config.StripAudibleBrandAudio = false;
if (!config.Exists(nameof(config.DecryptToLossy)))
config.DecryptToLossy = false;
if (!config.Exists(nameof(config.LameTargetBitrate)))
config.LameTargetBitrate = false;
if (!config.Exists(nameof(config.LameDownsampleMono)))
config.LameDownsampleMono = true;
if (!config.Exists(nameof(config.LameBitrate)))
config.LameBitrate = 64;
if (!config.Exists(nameof(config.LameConstantBitrate)))
config.LameConstantBitrate = false;
if (!config.Exists(nameof(config.LameMatchSourceBR)))
config.LameMatchSourceBR = true;
if (!config.Exists(nameof(config.LameVBRQuality)))
config.LameVBRQuality = 2;
if (!config.Exists(nameof(config.BadBook)))
config.BadBook = Configuration.BadBookAction.Ask;
if (!config.Exists(nameof(config.ShowImportedStats)))
config.ShowImportedStats = true;
if (!config.Exists(nameof(config.ImportEpisodes)))
config.ImportEpisodes = true;
if (!config.Exists(nameof(config.DownloadEpisodes)))
config.DownloadEpisodes = true;
if (!config.Exists(nameof(config.ReplacementCharacters)))
config.ReplacementCharacters = FileManager.ReplacementCharacters.Default;
if (!config.Exists(nameof(config.FolderTemplate)))
config.FolderTemplate = Templates.Folder.DefaultTemplate;
if (!config.Exists(nameof(config.FileTemplate)))
config.FileTemplate = Templates.File.DefaultTemplate;
if (!config.Exists(nameof(config.ChapterFileTemplate)))
config.ChapterFileTemplate = Templates.ChapterFile.DefaultTemplate;
if (!config.Exists(nameof(config.ChapterTitleTemplate)))
config.ChapterTitleTemplate = Templates.ChapterTitle.DefaultTemplate;
if (!config.Exists(nameof(config.AutoScan)))
config.AutoScan = true;
if (!config.Exists(nameof(config.GridColumnsVisibilities)))
config.GridColumnsVisibilities = new Dictionary<string, bool>();
if (!config.Exists(nameof(config.GridColumnsDisplayIndices)))
config.GridColumnsDisplayIndices = new Dictionary<string, int>();
if (!config.Exists(nameof(config.GridColumnsWidths)))
config.GridColumnsWidths = new Dictionary<string, int>();
if (!config.Exists(nameof(config.DownloadCoverArt)))
config.DownloadCoverArt = true;
if (!config.Exists(nameof(config.AutoDownloadEpisodes)))
config.AutoDownloadEpisodes = false;
} }
/// <summary>Initialize logging. Wire-up events. Run after migration</summary> /// <summary>Initialize logging. Wire-up events. Run after migration</summary>
public static void RunPostMigrationScaffolding(Variety variety, Configuration config) public static void RunPostMigrationScaffolding(Configuration config)
{ {
Variety = Enum.IsDefined(variety) ? variety : Variety.None;
var releaseID = (ReleaseIdentifier)((int)variety | (int)Configuration.OS | (int)RuntimeInformation.ProcessArchitecture);
ReleaseIdentifier = Enum.IsDefined(releaseID) ? releaseID : ReleaseIdentifier.None;
ensureSerilogConfig(config); ensureSerilogConfig(config);
configureLogging(config); configureLogging(config);
logStartupState(config); logStartupState(config);
@ -113,35 +193,14 @@ namespace AppScaffolding
private static void ensureSerilogConfig(Configuration config) private static void ensureSerilogConfig(Configuration config)
{ {
if (config.GetObject("Serilog") is JObject serilog) if (config.GetObject("Serilog") is not null)
{
bool fileChanged = false;
if (serilog.SelectToken("$.WriteTo[?(@.Name == 'ZipFile')]", false) is JObject zipFileSink)
{
zipFileSink["Name"] = "File";
fileChanged = true;
}
var hooks = typeof(FileSinkHook).AssemblyQualifiedName;
if (serilog.SelectToken("$.WriteTo[?(@.Name == 'File')].Args", false) is JObject fileSinkArgs
&& fileSinkArgs["hooks"]?.Value<string>() != hooks)
{
fileSinkArgs["hooks"] = hooks;
fileChanged = true;
}
if (fileChanged)
config.SetNonString(serilog.DeepClone(), "Serilog");
return; return;
}
var serilogObj = new JObject var serilogObj = new JObject
{ {
{ "MinimumLevel", "Information" }, { "MinimumLevel", "Information" },
{ "WriteTo", new JArray { "WriteTo", new JArray
{ {
// ABOUT SINKS
// Only File sink is currently used. By user request (June 2024) other packages are included for experimental use.
// new JObject { {"Name", "Console" } }, // this has caused more problems than it's solved // new JObject { {"Name", "Console" } }, // this has caused more problems than it's solved
new JObject new JObject
{ {
@ -150,7 +209,7 @@ namespace AppScaffolding
new JObject new JObject
{ {
// for this sink to work, a path must be provided. we override this below // for this sink to work, a path must be provided. we override this below
{ "path", Path.Combine(config.LibationFiles, "Log.log") }, { "path", Path.Combine(config.LibationFiles, "_Log.log") },
{ "rollingInterval", "Month" }, { "rollingInterval", "Month" },
// Serilog template formatting examples // Serilog template formatting examples
// - default: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] {Message:lj}{NewLine}{Exception}" // - default: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] {Message:lj}{NewLine}{Exception}"
@ -158,8 +217,7 @@ namespace AppScaffolding
// - with class and method info: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception}"; // - with class and method info: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception}";
// output example: 2019-11-26 08:48:40.224 -05:00 [DBG] (at LibationWinForms.Program.init()) Begin Libation // output example: 2019-11-26 08:48:40.224 -05:00 [DBG] (at LibationWinForms.Program.init()) Begin Libation
// {Properties:j} needed for expanded exception logging // {Properties:j} needed for expanded exception logging
{ "outputTemplate", "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}" }, { "outputTemplate", "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}" }
{ "hooks", typeof(FileSinkHook).AssemblyQualifiedName }, // for FileSinkHook
} }
} }
} }
@ -169,7 +227,7 @@ namespace AppScaffolding
{ "Using", new JArray{ "Dinah.Core", "Serilog.Exceptions" } }, // dll's name, NOT namespace { "Using", new JArray{ "Dinah.Core", "Serilog.Exceptions" } }, // dll's name, NOT namespace
{ "Enrich", new JArray{ "WithCaller", "WithExceptionDetails" } }, { "Enrich", new JArray{ "WithCaller", "WithExceptionDetails" } },
}; };
config.SetNonString(serilogObj, "Serilog"); config.SetObject("Serilog", serilogObj);
} }
// to restore original: Console.SetOut(origOut); // to restore original: Console.SetOut(origOut);
@ -246,20 +304,12 @@ namespace AppScaffolding
// begin logging session with a form feed // begin logging session with a form feed
Log.Logger.Information("\r\n\f"); Log.Logger.Information("\r\n\f");
static int fileCount(FileManager.LongPath longPath)
{
try { return FileManager.FileUtility.SaferEnumerateFiles(longPath).Count(); }
catch { return -1; }
}
Log.Logger.Information("Begin. {@DebugInfo}", new Log.Logger.Information("Begin. {@DebugInfo}", new
{ {
AppName = EntryAssembly.GetName().Name, AppName = EntryAssembly.GetName().Name,
Version = BuildVersion.ToString(), Version = BuildVersion.ToString(),
ReleaseIdentifier, ReleaseIdentifier,
Configuration.OS, Configuration.OS,
Environment.OSVersion,
InteropFactory.InteropFunctionsType, InteropFactory.InteropFunctionsType,
Mode = mode, Mode = mode,
LogLevel_Verbose_Enabled = Log.Logger.IsVerboseEnabled(), LogLevel_Verbose_Enabled = Log.Logger.IsVerboseEnabled(),
@ -269,7 +319,6 @@ namespace AppScaffolding
LogLevel_Error_Enabled = Log.Logger.IsErrorEnabled(), LogLevel_Error_Enabled = Log.Logger.IsErrorEnabled(),
LogLevel_Fatal_Enabled = Log.Logger.IsFatalEnabled(), LogLevel_Fatal_Enabled = Log.Logger.IsFatalEnabled(),
config.AutoScan,
config.BetaOptIn, config.BetaOptIn,
config.UseCoverAsFolderIcon, config.UseCoverAsFolderIcon,
config.LibationFiles, config.LibationFiles,
@ -278,12 +327,10 @@ namespace AppScaffolding
config.InProgress, config.InProgress,
AudibleFileStorage.DownloadsInProgressDirectory, AudibleFileStorage.DownloadsInProgressDirectory,
DownloadsInProgressFiles = fileCount(AudibleFileStorage.DownloadsInProgressDirectory), DownloadsInProgressFiles = FileManager.FileUtility.SaferEnumerateFiles(AudibleFileStorage.DownloadsInProgressDirectory).Count(),
AudibleFileStorage.DecryptInProgressDirectory, AudibleFileStorage.DecryptInProgressDirectory,
DecryptInProgressFiles = fileCount(AudibleFileStorage.DecryptInProgressDirectory), DecryptInProgressFiles = FileManager.FileUtility.SaferEnumerateFiles(AudibleFileStorage.DecryptInProgressDirectory).Count(),
disableIPv6 = AppContext.TryGetSwitch("System.Net.DisableIPv6", out bool disableIPv6Value),
}); });
if (InteropFactory.InteropFunctionsType is null) if (InteropFactory.InteropFunctionsType is null)
@ -292,34 +339,40 @@ namespace AppScaffolding
private static void wireUpSystemEvents(Configuration configuration) private static void wireUpSystemEvents(Configuration configuration)
{ {
LibraryCommands.LibrarySizeChanged += (object _, List<DataLayer.LibraryBook> libraryBooks) LibraryCommands.LibrarySizeChanged += (_, __) => SearchEngineCommands.FullReIndex();
=> SearchEngineCommands.FullReIndex(libraryBooks); LibraryCommands.BookUserDefinedItemCommitted += (_, books) => SearchEngineCommands.UpdateBooks(books);
LibraryCommands.BookUserDefinedItemCommitted += (_, books)
=> SearchEngineCommands.UpdateBooks(books);
} }
public static UpgradeProperties GetLatestRelease() public static UpgradeProperties GetLatestRelease()
{ {
// timed out // timed out
(var version, var latest, var zip) = getLatestRelease(TimeSpan.FromSeconds(10)); (var latest, var zip) = getLatestRelease(TimeSpan.FromSeconds(10));
if (version is null || latest is null || zip is null) if (latest is null || zip is null)
return null;
var latestVersionString = latest.TagName.Trim('v');
if (!Version.TryParse(latestVersionString, out var latestRelease))
return null;
// we're up to date
if (latestRelease <= BuildVersion)
return null; return null;
// we have an update // we have an update
var zipUrl = zip?.BrowserDownloadUrl; var zipUrl = zip?.BrowserDownloadUrl;
Log.Logger.Information("Update available: {@DebugInfo}", new Log.Logger.Information("Update available: {@DebugInfo}", new
{ {
latestRelease = version.ToString(), latestRelease = latestRelease.ToString(),
latest.HtmlUrl, latest.HtmlUrl,
zipUrl zipUrl
}); });
return new(zipUrl, latest.HtmlUrl, zip.Name, version, latest.Body); return new(zipUrl, latest.HtmlUrl, zip.Name, latestRelease);
} }
private static (Version releaseVersion, Octokit.Release, Octokit.ReleaseAsset) getLatestRelease(TimeSpan timeout) private static (Octokit.Release, Octokit.ReleaseAsset) getLatestRelease(TimeSpan timeout)
{ {
try try
{ {
@ -333,41 +386,26 @@ namespace AppScaffolding
{ {
Log.Logger.Error(aggEx, "Checking for new version too often"); Log.Logger.Error(aggEx, "Checking for new version too often");
} }
return (null, null, null); return (null, null);
} }
private static async System.Threading.Tasks.Task<(Version releaseVersion, Octokit.Release, Octokit.ReleaseAsset)> getLatestRelease() private static async System.Threading.Tasks.Task<(Octokit.Release, Octokit.ReleaseAsset)> getLatestRelease()
{ {
const string ownerAccount = "rmcrackan"; var ownerAccount = "rmcrackan";
const string repoName = "Libation"; var repoName = "Libation";
var gitHubClient = new Octokit.GitHubClient(new Octokit.ProductHeaderValue(repoName)); var gitHubClient = new Octokit.GitHubClient(new Octokit.ProductHeaderValue(repoName));
//https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-the-latest-release
var latestRelease = await gitHubClient.Repository.Release.GetLatest(ownerAccount, repoName);
//Ensure that latest release is greater than the current version
var latestVersionString = latestRelease.TagName.Trim('v');
if (!Version.TryParse(latestVersionString, out var releaseVersion) || releaseVersion <= BuildVersion)
return (null, null, null);
//Download the release index //Download the release index
var bts = await gitHubClient.Repository.Content.GetRawContent(ownerAccount, repoName, ".releaseindex.json"); var bts = await gitHubClient.Repository.Content.GetRawContent(ownerAccount, repoName, ".releaseindex.json");
var releaseIndex = JObject.Parse(System.Text.Encoding.ASCII.GetString(bts)); var releaseIndex = JObject.Parse(System.Text.Encoding.ASCII.GetString(bts));
var regexPattern = releaseIndex.Value<string>(ReleaseIdentifier.ToString());
string regexPattern; // https://octokitnet.readthedocs.io/en/latest/releases/
var releases = await gitHubClient.Repository.Release.GetAll(ownerAccount, repoName);
try
{
regexPattern = releaseIndex.Value<string>(InteropFactory.Create().ReleaseIdString);
}
catch
{
regexPattern = releaseIndex.Value<string>(ReleaseIdentifier.ToString());
}
var regex = new System.Text.RegularExpressions.Regex(regexPattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase); var regex = new System.Text.RegularExpressions.Regex(regexPattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase);
var latestRelease = releases.FirstOrDefault(r => !r.Draft && !r.Prerelease && r.Assets.Any(a => regex.IsMatch(a.Name)));
return (releaseVersion, latestRelease, latestRelease?.Assets?.FirstOrDefault(a => regex.IsMatch(a.Name))); return (latestRelease, latestRelease?.Assets?.FirstOrDefault(a => regex.IsMatch(a.Name)));
} }
} }
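Condensed, the update check is: strip the leading "v" from the release tag and parse it as a Version, ignore anything not newer than the running build, then pick the release asset whose file name matches the pattern published for this ReleaseIdentifier in .releaseindex.json. A sketch of that core logic with hypothetical stand-ins for the Octokit types:

using System;
using System.Linq;
using System.Text.RegularExpressions;

record AssetInfo(string Name, string BrowserDownloadUrl);
record ReleaseInfo(string TagName, bool Draft, bool Prerelease, AssetInfo[] Assets);

static class UpdateCheckSketch
{
	public static (Version Version, AssetInfo Asset)? FindUpgrade(
		ReleaseInfo[] releases, Version runningVersion, string assetNamePattern)
	{
		var regex = new Regex(assetNamePattern, RegexOptions.IgnoreCase);

		foreach (var release in releases.Where(r => !r.Draft && !r.Prerelease))
		{
			// Tags look like "v12.5.3"; trim the prefix before parsing.
			if (!Version.TryParse(release.TagName.Trim('v'), out var version) || version <= runningVersion)
				continue;

			var asset = release.Assets.FirstOrDefault(a => regex.IsMatch(a.Name));
			if (asset is not null)
				return (version, asset);
		}
		return null;
	}
}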
@ -420,137 +458,72 @@ namespace AppScaffolding
} }
} }
class FilterState_6_6_9 public static void migrate_from_7_10_1(Configuration config)
{ {
public bool UseDefault { get; set; } var lastMigrationThrew = config.GetNonString<bool>($"{nameof(migrate_from_7_10_1)}_ThrewError");
public List<string> Filters { get; set; } = new();
}
public static void migrate_to_v12_0_1(Configuration config) if (lastMigrationThrew) return;
{
#nullable enable
//Migrate from version 1 file cache to the dictionary-based version 2 cache
const string FILENAME_V1 = "FileLocations.json";
const string FILENAME_V2 = "FileLocationsV2.json";
var jsonFileV1 = Path.Combine(Configuration.Instance.LibationFiles, FILENAME_V1);
var jsonFileV2 = Path.Combine(Configuration.Instance.LibationFiles, FILENAME_V2);
if (!File.Exists(jsonFileV2) && File.Exists(jsonFileV1))
{
try
{
//FilePathCache loads the cache in its static constructor,
//so perform migration without using FilePathCache.CacheEntry
if (JArray.Parse(File.ReadAllText(jsonFileV1)) is not JArray v1Cache || v1Cache.Count == 0)
return;
Dictionary<string, JArray> cache = new();
//Convert to c# objects to speed up searching by ID inside the iterator
var allItems
= v1Cache
.Select(i => new
{
Id = i["Id"]?.Value<string>(),
Path = i["Path"]?["Path"]?.Value<string>()
}).Where(i => i.Id != null)
.ToArray();
foreach (var id in allItems.Select(i => i.Id).OfType<string>().Distinct())
{
//Use this opportunity to purge non-existent files and re-classify file types
//(due to *.aax files previously not being classified as FileType.AAXC)
var items = allItems
.Where(i => i.Id == id && File.Exists(i.Path))
.Select(i => new JObject
{
{ "Id", i.Id },
{ "FileType", (int)FileTypes.GetFileTypeFromPath(i.Path) },
{ "Path", new JObject{ { "Path", i.Path } } }
})
.ToArray();
if (items.Length == 0)
continue;
cache[id] = new JArray(items);
}
var cacheJson = new JObject { { "Dictionary", JObject.FromObject(cache) } };
var cacheFileText = cacheJson.ToString(Formatting.Indented);
void migrate()
{
File.WriteAllText(jsonFileV2, cacheFileText);
File.Delete(jsonFileV1);
}
try { migrate(); }
catch (IOException)
{
try { migrate(); }
catch (IOException)
{
migrate();
}
}
}
catch { /* eat */ }
}
#nullable restore
}
public static void migrate_to_v11_6_5(Configuration config)
{
//Settings migration for unsupported sample rates (#1116)
if (config.MaxSampleRate < AAXClean.SampleRate.Hz_8000)
config.MaxSampleRate = AAXClean.SampleRate.Hz_8000;
else if (config.MaxSampleRate > AAXClean.SampleRate.Hz_48000)
config.MaxSampleRate = AAXClean.SampleRate.Hz_48000;
}
public static void migrate_to_v11_5_0(Configuration config)
{
// Read file, but convert old format to new (with Name field) as necessary.
if (!File.Exists(QuickFilters.JsonFile))
{
QuickFilters.InMemoryState = new();
return;
}
try try
{ {
if (JsonConvert.DeserializeObject<QuickFilters.FilterState>(File.ReadAllText(QuickFilters.JsonFile))
is QuickFilters.FilterState inMemState)
{
QuickFilters.InMemoryState = inMemState;
return;
}
}
catch
{
// Eat
}
try //https://github.com/rmcrackan/Libation/issues/270#issuecomment-1152863629
{ //This migration helps fix databases contaminated with the 7.10.1 hack workaround
if (JsonConvert.DeserializeObject<FilterState_6_6_9>(File.ReadAllText(QuickFilters.JsonFile)) //and those with improperly identified or missing series. This does not solve cases
is FilterState_6_6_9 inMemState) //where individual episodes are in the db with a valid series link, but said series'
{ //parents have not been imported into the database. For those cases, Libation will
// Copy old structure to new. //attempt fixup by retrieving parents from the catalog endpoint
QuickFilters.InMemoryState = new();
QuickFilters.InMemoryState.UseDefault = inMemState.UseDefault;
foreach (var oldFilter in inMemState.Filters)
QuickFilters.InMemoryState.Filters.Add(new QuickFilters.NamedFilter(oldFilter, null));
return; using var context = DbContexts.GetContext();
//This migration removes books and series with SERIES_ prefix that were created
//as a hack workaround in 7.10.1. Said workaround was removed in 7.10.2
string removeHackSeries = "delete " +
"from series " +
"where AudibleSeriesId like 'SERIES%'";
string removeHackBooks = "delete " +
"from books " +
"where AudibleProductId like 'SERIES%'";
//Detect series parents that were added to the database as books with ContentType.Episode,
//and change them to ContentType.Parent
string updateContentType =
"UPDATE books " +
"SET contenttype = 4 " +
"WHERE audibleproductid IN (SELECT books.audibleproductid " +
"FROM books " +
"INNER JOIN series " +
"ON ( books.audibleproductid = " +
"series.audibleseriesid) " +
"WHERE books.contenttype = 2)";
//Then detect series parents that were added to the database as books with ContentType.Parent
//but are missing a series link, and add the link (don't know how this happened)
string addMissingSeriesLink =
"INSERT INTO seriesbook " +
"SELECT series.seriesid, " +
"books.bookid, " +
"'- 1' " +
"FROM books " +
"LEFT OUTER JOIN seriesbook " +
"ON books.bookid = seriesbook.bookid " +
"INNER JOIN series " +
"ON books.audibleproductid = series.audibleseriesid " +
"WHERE books.contenttype = 4 " +
"AND seriesbook.seriesid IS NULL";
context.Database.ExecuteSqlRaw(removeHackSeries);
context.Database.ExecuteSqlRaw(removeHackBooks);
context.Database.ExecuteSqlRaw(updateContentType);
context.Database.ExecuteSqlRaw(addMissingSeriesLink);
LibraryCommands.SaveContext(context);
} }
Debug.Assert(false, "Should not get here, QuickFilters.json deserialization issue"); catch (Exception ex)
}
catch
{ {
// Eat Serilog.Log.Logger.Error(ex, "An error occurred while running database migrations in {0}", nameof(migrate_from_7_10_1));
config.SetObject($"{nameof(migrate_from_7_10_1)}_ThrewError", true);
} }
} }
} }

View File

@ -0,0 +1,29 @@
using System;
namespace AppScaffolding
{
public abstract class OSConfigBase
{
public abstract Type InteropFunctionsType { get; }
public virtual Type[] ReferencedTypes { get; } = new Type[0];
public void Run()
{
//Each of these types belongs to a different windows-only assembly that's needed by
//the WinInterop methods. By referencing these types in main we force the runtime to
//load their assemblies before execution reaches inside main. This allows the calling
//process to find these assemblies in its module list.
_ = ReferencedTypes;
_ = InteropFunctionsType;
//Wait for the calling process to be ready to read the WriteLine()
Console.ReadLine();
// Signal the calling process that execution has reached inside main, and that all referenced assemblies have been loaded.
Console.WriteLine();
// Wait for the calling process to finish reading the process module list, then exit.
Console.ReadLine();
}
}
}
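Run() above is the child half of a stdin/stdout handshake: it blocks on ReadLine until the parent is listening, writes an empty line once the OS-specific assemblies have been loaded, then waits for one more line before exiting so the parent has time to inspect its module list. A hedged sketch of what the parent side of that exchange could look like (the child executable path and this helper are assumptions, not taken from this diff):

using System;
using System.Diagnostics;

static class ModuleProbeSketch
{
	public static void ProbeLoadedModules(string childExePath)
	{
		var startInfo = new ProcessStartInfo(childExePath)
		{
			RedirectStandardInput = true,
			RedirectStandardOutput = true,
			UseShellExecute = false,
		};

		using var child = Process.Start(startInfo)
			?? throw new InvalidOperationException("Failed to start child process.");

		// 1. Unblock the child's first ReadLine() to say we're listening.
		child.StandardInput.WriteLine();

		// 2. The child writes a line once its referenced assemblies are loaded.
		child.StandardOutput.ReadLine();

		// 3. The child's module list now includes those assemblies; inspect it.
		foreach (ProcessModule module in child.Modules)
			Console.WriteLine(module.FileName);

		// 4. Unblock the child's final ReadLine() so it can exit.
		child.StandardInput.WriteLine();
		child.WaitForExit();
	}
}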

View File

@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using Dinah.Core; using Dinah.Core;
using LibationFileManager;
using Newtonsoft.Json; using Newtonsoft.Json;
using Newtonsoft.Json.Linq; using Newtonsoft.Json.Linq;
@ -26,6 +25,9 @@ namespace AppScaffolding
: value; : value;
#region appsettings.json #region appsettings.json
private static string APPSETTINGS_JSON { get; } = Path.Combine(Path.GetDirectoryName(Environment.ProcessPath), "appsettings.json");
public static bool APPSETTINGS_Json_Exists => File.Exists(APPSETTINGS_JSON);
public static bool APPSETTINGS_TryGet(string key, out string value) public static bool APPSETTINGS_TryGet(string key, out string value)
{ {
@ -59,7 +61,11 @@ namespace AppScaffolding
/// <param name="save">True: save if contents changed. False: do not attempt save</param> /// <param name="save">True: save if contents changed. False: do not attempt save</param>
private static void process_APPSETTINGS_Json(Action<JObject> action, bool save = true) private static void process_APPSETTINGS_Json(Action<JObject> action, bool save = true)
{ {
var startingContents = File.ReadAllText(Configuration.AppsettingsJsonFile); // only insert if not exists
if (!APPSETTINGS_Json_Exists)
return;
var startingContents = File.ReadAllText(APPSETTINGS_JSON);
JObject jObj; JObject jObj;
try try
@ -82,7 +88,7 @@ namespace AppScaffolding
if (startingContents.EqualsInsensitive(endingContents_indented) || startingContents.EqualsInsensitive(endingContents_compact)) if (startingContents.EqualsInsensitive(endingContents_indented) || startingContents.EqualsInsensitive(endingContents_compact))
return; return;
File.WriteAllText(Configuration.AppsettingsJsonFile, endingContents_indented); File.WriteAllText(APPSETTINGS_JSON, endingContents_indented);
System.Threading.Thread.Sleep(100); System.Threading.Thread.Sleep(100);
} }
#endregion #endregion

View File

@ -1,26 +1,6 @@
using System; using System;
using System.Text.RegularExpressions;
namespace AppScaffolding namespace AppScaffolding
{ {
public partial record UpgradeProperties public record UpgradeProperties(string ZipUrl, string HtmlUrl, string ZipName, Version LatestRelease);
{
public string ZipUrl { get; }
public string HtmlUrl { get; }
public string ZipName { get; }
public Version LatestRelease { get; }
public string Notes { get; }
public UpgradeProperties(string zipUrl, string htmlUrl, string zipName, Version latestRelease, string notes)
{
ZipName = zipName;
HtmlUrl = htmlUrl;
ZipUrl = zipUrl;
LatestRelease = latestRelease;
Notes = LinkStripRegex().Replace(notes, "$1");
}
[GeneratedRegex(@"\[(.*)\]\(.*\)")]
private static partial Regex LinkStripRegex();
}
} }
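LinkStripRegex above rewrites Markdown links in the release notes down to their link text before the notes are shown. A one-line illustration of the same pattern on a made-up string:

using System;
using System.Text.RegularExpressions;

var stripped = Regex.Replace(
	"See the [12.5 release notes](https://github.com/rmcrackan/Libation/releases) for details.",
	@"\[(.*)\]\(.*\)",
	"$1");
Console.WriteLine(stripped); // "See the 12.5 release notes for details."

Note the pattern is greedy, which is fine for a single link per line but would swallow everything between the first "[" and the last ")" if several links shared one line.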

View File

@ -1,12 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net9.0</TargetFramework> <TargetFramework>net6.0</TargetFramework>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="CsvHelper" Version="33.1.0" /> <PackageReference Include="CsvHelper" Version="30.0.0" />
<PackageReference Include="NPOI" Version="2.7.4" /> <PackageReference Include="NPOI" Version="2.5.6" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

View File

@ -1,78 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using DataLayer;
using Dinah.Core;
using LibationFileManager;
namespace ApplicationServices
{
public class BulkSetDownloadStatus
{
private List<(string message, LiberatedStatus newStatus, IEnumerable<LibraryBook> LibraryBooks)> actionSets { get; } = new();
public int Count => actionSets.Count;
public IEnumerable<string> Messages => actionSets.Select(a => a.message);
public string AggregateMessage => $"Are you sure you want to set {Messages.Aggregate((a, b) => $"{a} and {b}")}?";
private List<LibraryBook> _libraryBooks;
private bool _setDownloaded;
private bool _setNotDownloaded;
public BulkSetDownloadStatus(List<LibraryBook> libraryBooks, bool setDownloaded, bool setNotDownloaded)
{
_libraryBooks = libraryBooks;
_setDownloaded = setDownloaded;
_setNotDownloaded = setNotDownloaded;
}
public int Discover()
{
var bookExistsList = _libraryBooks
.Select(libraryBook => new
{
LibraryBook = libraryBook,
FileExists = AudibleFileStorage.Audio.GetPath(libraryBook.Book.AudibleProductId) is not null
})
.ToList();
if (_setDownloaded)
{
var books2change = bookExistsList
.Where(a => a.FileExists && a.LibraryBook.Book.UserDefinedItem.BookStatus != LiberatedStatus.Liberated)
.Select(a => a.LibraryBook)
.ToList();
if (books2change.Any())
actionSets.Add((
$"{"book".PluralizeWithCount(books2change.Count)} to 'Downloaded'",
LiberatedStatus.Liberated,
books2change));
}
if (_setNotDownloaded)
{
var books2change = bookExistsList
.Where(a => !a.FileExists && a.LibraryBook.Book.UserDefinedItem.BookStatus != LiberatedStatus.NotLiberated)
.Select(a => a.LibraryBook)
.ToList();
if (books2change.Any())
actionSets.Add((
$"{"book".PluralizeWithCount(books2change.Count)} to 'Not Downloaded'",
LiberatedStatus.NotLiberated,
books2change));
}
return Count;
}
public void Execute()
{
foreach (var a in actionSets)
a.LibraryBooks.UpdateBookStatus(a.newStatus);
}
}
}

View File

@ -1,27 +1,22 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
using AudibleApi; using AudibleApi;
using AudibleUtilities; using AudibleUtilities;
using DataLayer; using DataLayer;
using Dinah.Core; using Dinah.Core;
using Dinah.Core.Logging;
using DtoImporterService; using DtoImporterService;
using FileManager;
using LibationFileManager; using LibationFileManager;
using Newtonsoft.Json.Linq;
using Serilog; using Serilog;
using static DtoImporterService.PerfLogger; using static DtoImporterService.PerfLogger;
#nullable enable
namespace ApplicationServices namespace ApplicationServices
{ {
public static class LibraryCommands public static class LibraryCommands
{ {
public static event EventHandler<int>? ScanBegin; public static event EventHandler<int> ScanBegin;
public static event EventHandler<int>? ScanEnd; public static event EventHandler ScanEnd;
public static bool Scanning { get; private set; } public static bool Scanning { get; private set; }
private static object _lock { get; } = new(); private static object _lock { get; } = new();
@ -32,7 +27,7 @@ namespace ApplicationServices
ScanEnd += (_, __) => Scanning = false; ScanEnd += (_, __) => Scanning = false;
} }
public static async Task<List<LibraryBook>> FindInactiveBooks(IEnumerable<LibraryBook> existingLibrary, params Account[] accounts) public static async Task<List<LibraryBook>> FindInactiveBooks(Func<Account, Task<ApiExtended>> apiExtendedfunc, IEnumerable<LibraryBook> existingLibrary, params Account[] accounts)
{ {
logRestart(); logRestart();
@ -58,7 +53,7 @@ namespace ApplicationServices
try try
{ {
logTime($"pre {nameof(scanAccountsAsync)} all"); logTime($"pre {nameof(scanAccountsAsync)} all");
var libraryItems = await scanAccountsAsync(accounts, libraryOptions); var libraryItems = await scanAccountsAsync(apiExtendedfunc, accounts, libraryOptions);
logTime($"post {nameof(scanAccountsAsync)} all"); logTime($"post {nameof(scanAccountsAsync)} all");
var totalCount = libraryItems.Count; var totalCount = libraryItems.Count;
@ -95,20 +90,18 @@ namespace ApplicationServices
{ {
stop(); stop();
var putBreakPointHere = logOutput; var putBreakPointHere = logOutput;
ScanEnd?.Invoke(null, 0); ScanEnd?.Invoke(null, null);
GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, true, true);
} }
} }
#region FULL LIBRARY scan and import #region FULL LIBRARY scan and import
public static async Task<(int totalCount, int newCount)> ImportAccountAsync(params Account[]? accounts) public static async Task<(int totalCount, int newCount)> ImportAccountAsync(Func<Account, Task<ApiExtended>> apiExtendedfunc, params Account[] accounts)
{ {
logRestart(); logRestart();
if (accounts is null || accounts.Length == 0) if (accounts is null || accounts.Length == 0)
return (0, 0); return (0, 0);
int newCount = 0;
try try
{ {
lock (_lock) lock (_lock)
@ -121,17 +114,10 @@ namespace ApplicationServices
logTime($"pre {nameof(scanAccountsAsync)} all"); logTime($"pre {nameof(scanAccountsAsync)} all");
var libraryOptions = new LibraryOptions var libraryOptions = new LibraryOptions
{ {
ResponseGroups ResponseGroups = LibraryOptions.ResponseGroupOptions.ALL_OPTIONS,
= LibraryOptions.ResponseGroupOptions.Rating | LibraryOptions.ResponseGroupOptions.Media
| LibraryOptions.ResponseGroupOptions.Relationships | LibraryOptions.ResponseGroupOptions.ProductDesc
| LibraryOptions.ResponseGroupOptions.Contributors | LibraryOptions.ResponseGroupOptions.ProvidedReview
| LibraryOptions.ResponseGroupOptions.ProductPlans | LibraryOptions.ResponseGroupOptions.Series
| LibraryOptions.ResponseGroupOptions.CategoryLadders | LibraryOptions.ResponseGroupOptions.ProductExtendedAttrs
| LibraryOptions.ResponseGroupOptions.PdfUrl | LibraryOptions.ResponseGroupOptions.OriginAsin
| LibraryOptions.ResponseGroupOptions.IsFinished,
ImageSizes = LibraryOptions.ImageSizeOptions._500 | LibraryOptions.ImageSizeOptions._1215 ImageSizes = LibraryOptions.ImageSizeOptions._500 | LibraryOptions.ImageSizeOptions._1215
}; };
var importItems = await scanAccountsAsync(accounts, libraryOptions); var importItems = await scanAccountsAsync(apiExtendedfunc, accounts, libraryOptions);
logTime($"post {nameof(scanAccountsAsync)} all"); logTime($"post {nameof(scanAccountsAsync)} all");
var totalCount = importItems.Count; var totalCount = importItems.Count;
@ -140,9 +126,25 @@ namespace ApplicationServices
if (totalCount == 0) if (totalCount == 0)
return default; return default;
Log.Logger.Information("Begin scan for orphaned episode parents");
var newParents = await findAndAddMissingParents(accounts);
Log.Logger.Information($"Orphan episode scan complete. New parents count {newParents}");
if (newParents >= 0)
{
//If any episodes are still orphaned, their series have been
//removed from the catalog and we'll never be able to find them.
//only do this if findAndAddMissingParents returned >= 0. If it
//returned < 0, an error happened and there's still a chance that
//a future successful run will find missing parents.
removedOrphanedEpisodes();
}
Log.Logger.Information("Begin long-running import"); Log.Logger.Information("Begin long-running import");
logTime($"pre {nameof(importIntoDbAsync)}"); logTime($"pre {nameof(importIntoDbAsync)}");
newCount = await importIntoDbAsync(importItems); var newCount = await importIntoDbAsync(importItems);
logTime($"post {nameof(importIntoDbAsync)}"); logTime($"post {nameof(importIntoDbAsync)}");
Log.Logger.Information($"Import complete. New count {newCount}"); Log.Logger.Information($"Import complete. New count {newCount}");
@ -175,119 +177,20 @@ namespace ApplicationServices
{ {
stop(); stop();
var putBreakPointHere = logOutput; var putBreakPointHere = logOutput;
ScanEnd?.Invoke(null, newCount); ScanEnd?.Invoke(null, null);
GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, true, true);
} }
} }
public static async Task<int> ImportSingleToDbAsync(AudibleApi.Common.Item item, string accountId, string localeName) private static async Task<List<ImportItem>> scanAccountsAsync(Func<Account, Task<ApiExtended>> apiExtendedfunc, Account[] accounts, LibraryOptions libraryOptions)
{
ArgumentValidator.EnsureNotNull(item, "item");
ArgumentValidator.EnsureNotNull(accountId, "accountId");
ArgumentValidator.EnsureNotNull(localeName, "localeName");
var importItem = new ImportItem
{
DtoItem = item,
AccountId = accountId,
LocaleName = localeName
};
var importItems = new List<ImportItem> { importItem };
var validator = new LibraryValidator();
var exceptions = validator.Validate(importItems.Select(i => i.DtoItem));
if (exceptions?.Any() ?? false)
{
Log.Logger.Error(new AggregateException(exceptions), "Error validating library book. {@DebugInfo}", new { item, accountId, localeName });
return 0;
}
using var context = DbContexts.GetContext();
var bookImporter = new BookImporter(context);
await Task.Run(() => bookImporter.Import(importItems));
var book = await Task.Run(() => context.LibraryBooks.FirstOrDefault(lb => lb.Book.AudibleProductId == importItem.DtoItem.ProductId));
if (book is null)
{
book = new LibraryBook(bookImporter.Cache[importItem.DtoItem.ProductId], importItem.DtoItem.DateAdded, importItem.AccountId);
context.LibraryBooks.Add(book);
}
else
{
book.AbsentFromLastScan = false;
}
try
{
int qtyChanged = await Task.Run(() => SaveContext(context));
if (qtyChanged > 0)
await Task.Run(() => finalizeLibrarySizeChange(context));
return qtyChanged;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error adding single library book to DB. {@DebugInfo}", new { item, accountId, localeName });
return 0;
}
}
private static LogArchiver? openLogArchive(string? archivePath)
{
if (string.IsNullOrWhiteSpace(archivePath))
return null;
try
{
return new LogArchiver(archivePath);
}
catch (System.IO.InvalidDataException)
{
try
{
Log.Logger.Warning($"Deleting corrupted {nameof(LogArchiver)} at {archivePath}");
FileUtility.SaferDelete(archivePath);
return new LogArchiver(archivePath);
}
catch (Exception ex)
{
Log.Logger.Error(ex, $"Failed to open {nameof(LogArchiver)} at {archivePath}");
}
}
catch (Exception ex)
{
Log.Logger.Error(ex, $"Failed to open {nameof(LogArchiver)} at {archivePath}");
}
return null;
}
private static async Task<List<ImportItem>> scanAccountsAsync(Account[] accounts, LibraryOptions libraryOptions)
{ {
var tasks = new List<Task<List<ImportItem>>>(); var tasks = new List<Task<List<ImportItem>>>();
await using LogArchiver? archiver
= Log.Logger.IsDebugEnabled()
? openLogArchive(System.IO.Path.Combine(Configuration.Instance.LibationFiles, "LibraryScans.zip"))
: default;
archiver?.DeleteAllButNewestN(20);
foreach (var account in accounts) foreach (var account in accounts)
{
try
{ {
// get APIs in serial b/c of logins. do NOT move inside of parallel (Task.WhenAll) // get APIs in serial b/c of logins. do NOT move inside of parallel (Task.WhenAll)
var apiExtended = await ApiExtended.CreateAsync(account); var apiExtended = await apiExtendedfunc(account);
// add scanAccountAsync as a TASK: do not await // add scanAccountAsync as a TASK: do not await
tasks.Add(scanAccountAsync(apiExtended, account, libraryOptions, archiver)); tasks.Add(scanAccountAsync(apiExtended, account, libraryOptions));
}
catch(Exception ex)
{
//Catch to allow other accounts to continue scanning.
Log.Logger.Error(ex, "Failed to scan account");
}
} }
// import library in parallel // import library in parallel
@ -296,56 +199,23 @@ namespace ApplicationServices
return importItems; return importItems;
} }
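As the comment above stresses, authenticated API clients are created one at a time (logins may be interactive and must not race) and only the per-account scans are fanned out with Task.WhenAll. A minimal sketch of that serial-then-parallel shape, with placeholder delegate types:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

static class SerialLoginParallelScanSketch
{
	public static async Task<List<TItem>> ScanAllAsync<TAccount, TClient, TItem>(
		IEnumerable<TAccount> accounts,
		Func<TAccount, Task<TClient>> loginAsync,              // must run strictly one at a time
		Func<TClient, TAccount, Task<List<TItem>>> scanAsync)  // safe to run in parallel
	{
		var scans = new List<Task<List<TItem>>>();

		foreach (var account in accounts)
		{
			// Serial: await each login before starting the next one.
			var client = await loginAsync(account);

			// Parallel: start the scan as a task but do not await it yet.
			scans.Add(scanAsync(client, account));
		}

		var results = await Task.WhenAll(scans);
		return results.SelectMany(items => items).ToList();
	}
}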
private static async Task<List<ImportItem>> scanAccountAsync(ApiExtended apiExtended, Account account, LibraryOptions libraryOptions, LogArchiver? archiver) private static async Task<List<ImportItem>> scanAccountAsync(ApiExtended apiExtended, Account account, LibraryOptions libraryOptions)
{ {
ArgumentValidator.EnsureNotNull(account, nameof(account)); ArgumentValidator.EnsureNotNull(account, nameof(account));
Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new
{ {
Account = account.MaskedLogEntry ?? "[null]" Account = account?.MaskedLogEntry ?? "[null]"
}); });
logTime($"pre scanAccountAsync {account.AccountName}"); logTime($"pre scanAccountAsync {account.AccountName}");
try
{
var dtoItems = await apiExtended.GetLibraryValidatedAsync(libraryOptions, Configuration.Instance.ImportEpisodes);
logTime($"post scanAccountAsync {account.AccountName} qty: {dtoItems.Count}");
await logDtoItemsAsync(dtoItems);
return dtoItems.Select(d => new ImportItem { DtoItem = d, AccountId = account.AccountId, LocaleName = account.Locale?.Name }).ToList();
}
catch(ImportValidationException ex)
{
await logDtoItemsAsync(ex.Items, ex.InnerExceptions.ToArray());
//If ImportValidationException is thrown, all Dto items get logged as part of the exception
throw new AggregateException(ex.InnerExceptions);
}
async Task logDtoItemsAsync(IEnumerable<AudibleApi.Common.Item> dtoItems, IEnumerable<Exception>? exceptions = null)
{
if (archiver is not null)
{
var fileName = $"{DateTime.Now:u} {account.MaskedLogEntry}.json";
var items = await Task.Run(() => JArray.FromObject(dtoItems.Select(i => i.SourceJson)));
var scanFile = new JObject
{
{ "Account", account.MaskedLogEntry },
{ "ScannedDateTime", DateTime.Now.ToString("u") },
};
if (exceptions?.Any() is true)
scanFile.Add("Exceptions", JArray.FromObject(exceptions));
scanFile.Add("Items", items);
await archiver.AddFileAsync(fileName, scanFile);
}
}
}
private static async Task<int> importIntoDbAsync(List<ImportItem> importItems)
{
@@ -359,12 +229,90 @@ namespace ApplicationServices
// this is any changes at all to the database, not just new books
if (qtyChanges > 0)
await Task.Run(() => finalizeLibrarySizeChange(context));
logTime("importIntoDbAsync -- post finalizeLibrarySizeChange");
return newCount;
}
static void removedOrphanedEpisodes()
{
using var context = DbContexts.GetContext();
try
{
var orphanedEpisodes =
context
.GetLibrary_Flat_NoTracking(includeParents: true)
.FindOrphanedEpisodes();
context.LibraryBooks.RemoveRange(orphanedEpisodes);
context.Books.RemoveRange(orphanedEpisodes.Select(lb => lb.Book));
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "An error occurred while trying to remove orphaned episodes from the database");
}
}
static async Task<int> findAndAddMissingParents(Account[] accounts)
{
using var context = DbContexts.GetContext();
var library = context.GetLibrary_Flat_NoTracking(includeParents: true);
try
{
var orphanedEpisodes = library.FindOrphanedEpisodes().ToList();
if (!orphanedEpisodes.Any())
return -1;
var orphanedSeries =
orphanedEpisodes
.SelectMany(lb => lb.Book.SeriesLink)
.DistinctBy(s => s.Series.AudibleSeriesId)
.ToList();
// The Catalog endpoint does not require authentication.
var api = new ApiUnauthenticated(accounts[0].Locale);
var seriesParents = orphanedSeries.Select(o => o.Series.AudibleSeriesId).ToList();
var items = await api.GetCatalogProductsAsync(seriesParents, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
List<ImportItem> newParentsImportItems = new();
foreach (var sp in orphanedSeries)
{
var seriesItem = items.First(i => i.Asin == sp.Series.AudibleSeriesId);
if (seriesItem.Relationships is null)
continue;
var episode = orphanedEpisodes.First(l => l.Book.AudibleProductId == sp.Book.AudibleProductId);
seriesItem.PurchaseDate = new DateTimeOffset(episode.DateAdded);
seriesItem.Series = new AudibleApi.Common.Series[]
{
new AudibleApi.Common.Series{ Asin = seriesItem.Asin, Title = seriesItem.TitleWithSubtitle, Sequence = "-1"}
};
newParentsImportItems.Add(new ImportItem { DtoItem = seriesItem, AccountId = episode.Account, LocaleName = episode.Book.Locale });
}
var newCount = new LibraryBookImporter(context)
.Import(newParentsImportItems);
await context.SaveChangesAsync();
return newCount;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "An error occurred while trying to scan for orphaned episode parents.");
return -1;
}
}
public static int SaveContext(LibationContext context)
{
try
@@ -387,188 +335,92 @@ namespace ApplicationServices
}
#endregion
#region remove/restore books
public static Task<int> RemoveBooksAsync(this IEnumerable<LibraryBook> idsToRemove) => Task.Run(() => removeBooks(idsToRemove));
public static int RemoveBook(this LibraryBook idToRemove) => removeBooks(new[] { idToRemove });
private static int removeBooks(IEnumerable<LibraryBook> removeLibraryBooks)
{
try
{
if (removeLibraryBooks is null || !removeLibraryBooks.Any())
return 0;
using var context = DbContexts.GetContext();
// Entry() NoTracking entities before SaveChanges()
foreach (var lb in removeLibraryBooks)
{
lb.IsDeleted = true;
context.Entry(lb).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
}
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
finalizeLibrarySizeChange(context);
return qtyChanges;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error removing books");
throw;
}
}
public static int RestoreBooks(this IEnumerable<LibraryBook> libraryBooks)
{
try
{
if (libraryBooks is null || !libraryBooks.Any())
return 0;
using var context = DbContexts.GetContext();
// Entry() NoTracking entities before SaveChanges()
foreach (var lb in libraryBooks)
{
lb.IsDeleted = false;
context.Entry(lb).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
}
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
finalizeLibrarySizeChange(context);
return qtyChanges;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error restoring books");
throw;
}
}
public static int PermanentlyDeleteBooks(this IEnumerable<LibraryBook> libraryBooks)
{
try
{
if (libraryBooks is null || !libraryBooks.Any())
return 0;
using var context = DbContexts.GetContext();
context.LibraryBooks.RemoveRange(libraryBooks);
context.Books.RemoveRange(libraryBooks.Select(lb => lb.Book));
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
finalizeLibrarySizeChange(context);
return qtyChanges;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error restoring books");
throw;
}
}
#endregion
// call this whenever books are added or removed from library
private static void finalizeLibrarySizeChange(LibationContext context)
{
var library = context.GetLibrary_Flat_NoTracking(includeParents: true);
LibrarySizeChanged?.Invoke(null, library);
}
/// <summary>Occurs when the size of the library changes. ie: books are added or removed</summary>
public static event EventHandler<List<LibraryBook>>? LibrarySizeChanged;
/// <summary>
/// Occurs when the size of the library does not change but book(s) details do. Especially when <see cref="UserDefinedItem.Tags"/>, <see cref="UserDefinedItem.BookStatus"/>, or <see cref="UserDefinedItem.PdfStatus"/> changed values are successfully persisted.
/// </summary>
public static event EventHandler<IEnumerable<LibraryBook>>? BookUserDefinedItemCommitted;
#region Update book details
public static int UpdateUserDefinedItem(
this LibraryBook lb,
string? tags = null,
LiberatedStatus? bookStatus = null,
LiberatedStatus? pdfStatus = null,
Rating? rating = null)
=> new[] { lb }.UpdateUserDefinedItem(tags, bookStatus, pdfStatus, rating);
public static int UpdateUserDefinedItem(
this IEnumerable<LibraryBook> lb,
string? tags = null,
LiberatedStatus? bookStatus = null,
LiberatedStatus? pdfStatus = null,
Rating? rating = null)
=> updateUserDefinedItem(
lb,
udi => {
// blank tags are expected. null tags are not
if (tags is not null)
udi.Tags = tags;
if (bookStatus.HasValue)
udi.BookStatus = bookStatus.Value;
// method handles null logic
udi.SetPdfStatus(pdfStatus);
if (rating is not null)
udi.UpdateRating(rating.OverallRating, rating.PerformanceRating, rating.StoryRating);
});
public static int UpdateBookStatus(this LibraryBook lb, LiberatedStatus bookStatus, Version? libationVersion, AudioFormat audioFormat, string audioVersion)
=> lb.UpdateUserDefinedItem(udi => { udi.BookStatus = bookStatus; udi.SetLastDownloaded(libationVersion, audioFormat, audioVersion); });
public static int UpdateBookStatus(this LibraryBook libraryBook, LiberatedStatus bookStatus)
=> libraryBook.UpdateUserDefinedItem(udi => udi.BookStatus = bookStatus);
public static int UpdateBookStatus(this IEnumerable<LibraryBook> libraryBooks, LiberatedStatus bookStatus)
=> libraryBooks.UpdateUserDefinedItem(udi => udi.BookStatus = bookStatus);
public static int UpdatePdfStatus(this LibraryBook libraryBook, LiberatedStatus pdfStatus)
=> libraryBook.UpdateUserDefinedItem(udi => udi.SetPdfStatus(pdfStatus));
public static int UpdatePdfStatus(this IEnumerable<LibraryBook> libraryBooks, LiberatedStatus pdfStatus)
=> libraryBooks.UpdateUserDefinedItem(udi => udi.SetPdfStatus(pdfStatus));
public static int UpdateTags(this LibraryBook libraryBook, string tags)
=> libraryBook.UpdateUserDefinedItem(udi => udi.Tags = tags);
public static int UpdateTags(this IEnumerable<LibraryBook> libraryBooks, string tags)
=> libraryBooks.UpdateUserDefinedItem(udi => udi.Tags = tags);
public static int UpdateUserDefinedItem(this LibraryBook libraryBook, Action<UserDefinedItem> action)
=> libraryBook.updateUserDefinedItem(action);
public static int UpdateUserDefinedItem(this IEnumerable<LibraryBook> libraryBooks, Action<UserDefinedItem> action)
=> libraryBooks.updateUserDefinedItem(action);
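// Example usage of the extension methods above (illustrative sketch only; libraryBook is any LibraryBook previously loaded from the context):
//
//   libraryBook.UpdateBookStatus(LiberatedStatus.Liberated);
//   libraryBook.UpdateTags("favorites");
//   new[] { libraryBook }.UpdatePdfStatus(LiberatedStatus.NotLiberated);
//
// Each overload funnels into updateUserDefinedItem below, so one SaveChanges covers every book passed in.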
private static int updateUserDefinedItem(this LibraryBook libraryBook, Action<UserDefinedItem> action) => new[] { libraryBook }.updateUserDefinedItem(action);
private static int updateUserDefinedItem(this IEnumerable<LibraryBook> libraryBooks, Action<UserDefinedItem> action)
{
try
{
if (libraryBooks is null || !libraryBooks.Any())
return 0;
using var context = DbContexts.GetContext();
// Entry() instead of Attach() due to possible stack overflow with large tables
foreach (var book in libraryBooks)
{
action?.Invoke(book.Book.UserDefinedItem);
var udiEntity = context.Entry(book.Book.UserDefinedItem);
udiEntity.State = Microsoft.EntityFrameworkCore.EntityState.Modified;
if (udiEntity.Reference(udi => udi.Rating).TargetEntry is Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Rating> ratingEntry)
ratingEntry.State = Microsoft.EntityFrameworkCore.EntityState.Modified;
}
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
BookUserDefinedItemCommitted?.Invoke(null, libraryBooks);
return qtyChanges;
}
@@ -591,76 +443,40 @@ namespace ApplicationServices
// below are queries, not commands. maybe I should make a LibraryQueries. except there's already one of those...
public record LibraryStats(int booksFullyBackedUp, int booksDownloadedOnly, int booksNoProgress, int booksError, int booksUnavailable, int pdfsDownloaded, int pdfsNotDownloaded, int pdfsUnavailable)
{
public int PendingBooks => booksNoProgress + booksDownloadedOnly;
public bool HasPendingBooks => PendingBooks > 0;
public bool HasBookResults => 0 < (booksFullyBackedUp + booksDownloadedOnly + booksNoProgress + booksError + booksUnavailable);
public bool HasPdfResults => 0 < (pdfsNotDownloaded + pdfsDownloaded + pdfsUnavailable);
public string StatusString => HasPdfResults ? $"{toBookStatusString()} | {toPdfStatusString()}" : toBookStatusString();
private string toBookStatusString()
{
if (!HasBookResults) return "No books. Begin by importing your library";
if (!HasPendingBooks && booksError + booksUnavailable == 0) return $"All {"book".PluralizeWithCount(booksFullyBackedUp)} backed up";
var sb = new StringBuilder($"BACKUPS: No progress: {booksNoProgress} In process: {booksDownloadedOnly} Fully backed up: {booksFullyBackedUp}");
if (booksError > 0)
sb.Append($" Errors: {booksError}");
if (booksUnavailable > 0)
sb.Append($" Unavailable: {booksUnavailable}");
return sb.ToString();
}
private string toPdfStatusString()
{
if (pdfsNotDownloaded + pdfsUnavailable == 0) return $"All {pdfsDownloaded} PDFs downloaded";
var sb = new StringBuilder($"PDFs: NOT d/l'ed: {pdfsNotDownloaded} Downloaded: {pdfsDownloaded}");
if (pdfsUnavailable > 0)
sb.Append($" Unavailable: {pdfsUnavailable}");
return sb.ToString();
}
}
public static LibraryStats GetCounts(IEnumerable<LibraryBook>? libraryBooks = null)
{
libraryBooks ??= DbContexts.GetLibrary_Flat_NoTracking();
var results = libraryBooks
.AsParallel()
.WithoutParents()
.Select(lb => new { absent = lb.AbsentFromLastScan, status = Liberated_Status(lb.Book) })
.ToList();
var booksFullyBackedUp = results.Count(r => r.status == LiberatedStatus.Liberated);
var booksDownloadedOnly = results.Count(r => !r.absent && r.status == LiberatedStatus.PartialDownload);
var booksNoProgress = results.Count(r => !r.absent && r.status == LiberatedStatus.NotLiberated);
var booksError = results.Count(r => r.status == LiberatedStatus.Error);
var booksUnavailable = results.Count(r => r.absent && r.status is LiberatedStatus.NotLiberated or LiberatedStatus.PartialDownload);
Log.Logger.Information("Book counts. {@DebugInfo}", new { total = results.Count, booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError, booksUnavailable }); var boolResults = libraryBooks
var pdfResults = libraryBooks
.AsParallel() .AsParallel()
.Where(lb => lb.Book.HasPdf()) .Where(lb => lb.Book.HasPdf())
.Select(lb => new { absent = lb.AbsentFromLastScan, status = Pdf_Status(lb.Book) }) .Select(lb => Pdf_Status(lb.Book))
.ToList(); .ToList();
var pdfsDownloaded = pdfResults.Count(r => r.status == LiberatedStatus.Liberated);
var pdfsNotDownloaded = pdfResults.Count(r => !r.absent && r.status == LiberatedStatus.NotLiberated);
var pdfsUnavailable = pdfResults.Count(r => r.absent && r.status == LiberatedStatus.NotLiberated);
Log.Logger.Information("PDF counts. {@DebugInfo}", new { total = pdfResults.Count, pdfsDownloaded, pdfsNotDownloaded, pdfsUnavailable });
return new(booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError, booksUnavailable, pdfsDownloaded, pdfsNotDownloaded, pdfsUnavailable);
}
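// Example (illustrative sketch only, assuming the enclosing static class is LibraryCommands):
//
//   var stats = LibraryCommands.GetCounts();
//   if (stats.HasPendingBooks || stats.HasPdfResults)
//       Log.Logger.Information(stats.StatusString);
//
// Passing a pre-loaded IEnumerable<LibraryBook> avoids a second database read when the caller already has the library in memory.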
}
}

View File

@@ -4,8 +4,8 @@ using System.Linq;
using CsvHelper;
using CsvHelper.Configuration.Attributes;
using DataLayer;
using Newtonsoft.Json;
using NPOI.XSSF.UserModel;
using Serilog;
namespace ApplicationServices
{
@@ -35,9 +35,6 @@ namespace ApplicationServices
[Name("Title")]
public string Title { get; set; }
[Name("Subtitle")]
public string Subtitle { get; set; }
[Name("Authors")] [Name("Authors")]
public string AuthorNames { get; set; } public string AuthorNames { get; set; }
@ -104,40 +101,9 @@ namespace ApplicationServices
[Name("Content Type")] [Name("Content Type")]
public string ContentType { get; set; } public string ContentType { get; set; }
[Name("Language")] [Name("Audio Format")]
public string Language { get; set; } public string AudioFormat { get; set; }
[Name("LastDownloaded")]
public DateTime? LastDownloaded { get; set; }
[Name("LastDownloadedVersion")]
public string LastDownloadedVersion { get; set; }
[Name("IsFinished")]
public bool IsFinished { get; set; }
[Name("IsSpatial")]
public bool IsSpatial { get; set; }
[Name("Last Downloaded File Version")]
public string LastDownloadedFileVersion { get; set; }
[Ignore /* csv ignore */]
public AudioFormat LastDownloadedFormat { get; set; }
[Name("Last Downloaded Codec"), JsonIgnore]
public string CodecString => LastDownloadedFormat?.CodecString ?? "";
[Name("Last Downloaded Sample rate"), JsonIgnore]
public int? SampleRate => LastDownloadedFormat?.SampleRate;
[Name("Last Downloaded Audio Channels"), JsonIgnore]
public int? ChannelCount => LastDownloadedFormat?.ChannelCount;
[Name("Last Downloaded Bitrate"), JsonIgnore]
public int? BitRate => LastDownloadedFormat?.BitRate;
}
public static class LibToDtos
{
public static List<ExportDto> ToDtos(this IEnumerable<LibraryBook> library)
@@ -148,7 +114,6 @@ namespace ApplicationServices
AudibleProductId = a.Book.AudibleProductId,
Locale = a.Book.Locale,
Title = a.Book.Title,
Subtitle = a.Book.Subtitle,
AuthorNames = a.Book.AuthorNames(),
NarratorNames = a.Book.NarratorNames(),
LengthInMinutes = a.Book.LengthInMinutes,
@@ -157,30 +122,22 @@ namespace ApplicationServices
HasPdf = a.Book.HasPdf(),
SeriesNames = a.Book.SeriesNames(),
SeriesOrder = a.Book.SeriesLink.Any() ? a.Book.SeriesLink?.Select(sl => $"{sl.Order} : {sl.Series.Name}").Aggregate((a, b) => $"{a}, {b}") : "",
CommunityRatingOverall = a.Book.Rating?.OverallRating.ZeroIsNull(),
CommunityRatingPerformance = a.Book.Rating?.PerformanceRating.ZeroIsNull(),
CommunityRatingStory = a.Book.Rating?.StoryRating.ZeroIsNull(),
PictureId = a.Book.PictureId,
IsAbridged = a.Book.IsAbridged,
DatePublished = a.Book.DatePublished,
CategoriesNames = string.Join("; ", a.Book.LowestCategoryNames()),
MyRatingOverall = a.Book.UserDefinedItem.Rating.OverallRating.ZeroIsNull(),
MyRatingPerformance = a.Book.UserDefinedItem.Rating.PerformanceRating.ZeroIsNull(),
MyRatingStory = a.Book.UserDefinedItem.Rating.StoryRating.ZeroIsNull(),
MyLibationTags = a.Book.UserDefinedItem.Tags,
BookStatus = a.Book.UserDefinedItem.BookStatus.ToString(),
PdfStatus = a.Book.UserDefinedItem.PdfStatus.ToString(),
ContentType = a.Book.ContentType.ToString(),
Language = a.Book.Language,
LastDownloaded = a.Book.UserDefinedItem.LastDownloaded,
LastDownloadedVersion = a.Book.UserDefinedItem.LastDownloadedVersion?.ToString() ?? "",
IsFinished = a.Book.UserDefinedItem.IsFinished,
IsSpatial = a.Book.IsSpatial,
LastDownloadedFileVersion = a.Book.UserDefinedItem.LastDownloadedFileVersion ?? "",
LastDownloadedFormat = a.Book.UserDefinedItem.LastDownloadedFormat
}).ToList();
private static float? ZeroIsNull(this float value) => value is 0 ? null : value;
}
public static class LibraryExporter
{
@@ -189,6 +146,7 @@ namespace ApplicationServices
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
if (!dtos.Any())
return;
using var writer = new System.IO.StreamWriter(saveFilePath);
using var csv = new CsvWriter(writer, System.Globalization.CultureInfo.CurrentCulture);
@@ -200,7 +158,7 @@ namespace ApplicationServices
public static void ToJson(string saveFilePath)
{
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
var json = JsonConvert.SerializeObject(dtos, Formatting.Indented);
System.IO.File.WriteAllText(saveFilePath, json);
}
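// Example (illustrative sketch only; exportFolder is a caller-chosen directory):
//
//   LibraryExporter.ToJson(System.IO.Path.Combine(exportFolder, "library.json"));
//
// ToJson serializes the same ExportDto rows that the CSV and XLSX writers use, so the three export formats stay column-for-column consistent.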
@@ -227,7 +185,6 @@ namespace ApplicationServices
nameof(ExportDto.AudibleProductId),
nameof(ExportDto.Locale),
nameof(ExportDto.Title),
nameof(ExportDto.Subtitle),
nameof(ExportDto.AuthorNames),
nameof(ExportDto.NarratorNames),
nameof(ExportDto.LengthInMinutes),
@@ -250,16 +207,7 @@ namespace ApplicationServices
nameof(ExportDto.BookStatus),
nameof(ExportDto.PdfStatus),
nameof(ExportDto.ContentType),
nameof(ExportDto.Language),
nameof(ExportDto.LastDownloaded),
nameof(ExportDto.LastDownloadedVersion),
nameof(ExportDto.IsFinished),
nameof(ExportDto.IsSpatial),
nameof(ExportDto.LastDownloadedFileVersion),
nameof(ExportDto.CodecString),
nameof(ExportDto.SampleRate),
nameof(ExportDto.ChannelCount),
nameof(ExportDto.BitRate)
};
var col = 0;
foreach (var c in columns)
@@ -280,14 +228,18 @@ namespace ApplicationServices
foreach (var dto in dtos)
{
col = 0;
row = sheet.CreateRow(rowIndex++);
row.CreateCell(col++).SetCellValue(dto.Account);
row.CreateCell(col++).SetCellValue(dto.DateAdded).CellStyle = dateStyle;
row.CreateCell(col++).SetCellValue(dto.AudibleProductId);
row.CreateCell(col++).SetCellValue(dto.Locale);
row.CreateCell(col++).SetCellValue(dto.Title);
row.CreateCell(col++).SetCellValue(dto.Subtitle);
row.CreateCell(col++).SetCellValue(dto.AuthorNames);
row.CreateCell(col++).SetCellValue(dto.NarratorNames);
row.CreateCell(col++).SetCellValue(dto.LengthInMinutes);
@@ -296,46 +248,46 @@ namespace ApplicationServices
row.CreateCell(col++).SetCellValue(dto.HasPdf);
row.CreateCell(col++).SetCellValue(dto.SeriesNames);
row.CreateCell(col++).SetCellValue(dto.SeriesOrder);
row.CreateCell(col++).SetCellValue(dto.CommunityRatingOverall);
row.CreateCell(col++).SetCellValue(dto.CommunityRatingPerformance);
row.CreateCell(col++).SetCellValue(dto.CommunityRatingStory);
row.CreateCell(col++).SetCellValue(dto.PictureId);
row.CreateCell(col++).SetCellValue(dto.IsAbridged);
row.CreateCell(col++).SetCellValue(dto.DatePublished).CellStyle = dateStyle;
row.CreateCell(col++).SetCellValue(dto.CategoriesNames);
row.CreateCell(col++).SetCellValue(dto.MyRatingOverall);
row.CreateCell(col++).SetCellValue(dto.MyRatingPerformance);
row.CreateCell(col++).SetCellValue(dto.MyRatingStory);
row.CreateCell(col++).SetCellValue(dto.MyLibationTags);
row.CreateCell(col++).SetCellValue(dto.BookStatus);
row.CreateCell(col++).SetCellValue(dto.PdfStatus);
row.CreateCell(col++).SetCellValue(dto.ContentType);
row.CreateCell(col++).SetCellValue(dto.Language);
row.CreateCell(col++).SetCellValue(dto.LastDownloaded).CellStyle = dateStyle;
row.CreateCell(col++).SetCellValue(dto.LastDownloadedVersion);
row.CreateCell(col++).SetCellValue(dto.IsFinished);
row.CreateCell(col++).SetCellValue(dto.IsSpatial);
row.CreateCell(col++).SetCellValue(dto.LastDownloadedFileVersion);
row.CreateCell(col++).SetCellValue(dto.CodecString);
row.CreateCell(col++).SetCellValue(dto.SampleRate);
row.CreateCell(col++).SetCellValue(dto.ChannelCount);
row.CreateCell(col++).SetCellValue(dto.BitRate);
}
using var fileData = new System.IO.FileStream(saveFilePath, System.IO.FileMode.Create);
workbook.Write(fileData);
}
private static NPOI.SS.UserModel.ICell SetCellValue(this NPOI.SS.UserModel.ICell cell, DateTime? nullableDate)
=> nullableDate.HasValue ? cell.SetCellValue(nullableDate.Value)
: cell.SetCellType(NPOI.SS.UserModel.CellType.Numeric);
private static NPOI.SS.UserModel.ICell SetCellValue(this NPOI.SS.UserModel.ICell cell, int? nullableInt)
=> nullableInt.HasValue ? cell.SetCellValue(nullableInt.Value)
: cell.SetCellType(NPOI.SS.UserModel.CellType.Numeric);
private static NPOI.SS.UserModel.ICell SetCellValue(this NPOI.SS.UserModel.ICell cell, float? nullableFloat)
=> nullableFloat.HasValue ? cell.SetCellValue(nullableFloat.Value)
: cell.SetCellType(NPOI.SS.UserModel.CellType.Numeric);
}
}

View File

@@ -1,198 +0,0 @@
using AudibleApi.Common;
using CsvHelper;
using DataLayer;
using Newtonsoft.Json.Linq;
using NPOI.XSSF.UserModel;
using System;
using System.Collections.Generic;
using System.Linq;
namespace ApplicationServices
{
public static class RecordExporter
{
public static void ToXlsx(string saveFilePath, IEnumerable<IRecord> records)
{
if (!records.Any())
return;
using var workbook = new XSSFWorkbook();
var sheet = workbook.CreateSheet("Records");
var detailSubtotalFont = workbook.CreateFont();
detailSubtotalFont.IsBold = true;
var detailSubtotalCellStyle = workbook.CreateCellStyle();
detailSubtotalCellStyle.SetFont(detailSubtotalFont);
// headers
var rowIndex = 0;
var row = sheet.CreateRow(rowIndex);
var columns = new List<string>
{
nameof(Type.Name),
nameof(IRecord.Created),
nameof(IRecord.Start) + "_ms",
};
if (records.OfType<IAnnotation>().Any())
{
columns.Add(nameof(IAnnotation.AnnotationId));
columns.Add(nameof(IAnnotation.LastModified));
}
if (records.OfType<IRangeAnnotation>().Any())
{
columns.Add(nameof(IRangeAnnotation.End) + "_ms");
columns.Add(nameof(IRangeAnnotation.Text));
}
if (records.OfType<Clip>().Any())
columns.Add(nameof(Clip.Title));
var col = 0;
foreach (var c in columns)
{
var cell = row.CreateCell(col++);
cell.SetCellValue(c);
cell.CellStyle = detailSubtotalCellStyle;
}
var dateFormat = workbook.CreateDataFormat();
var dateStyle = workbook.CreateCellStyle();
dateStyle.DataFormat = dateFormat.GetFormat("MM/dd/yyyy HH:mm:ss");
// Add data rows
foreach (var record in records)
{
col = 0;
row = sheet.CreateRow(++rowIndex);
row.CreateCell(col++).SetCellValue(record.GetType().Name);
var dateCreatedCell = row.CreateCell(col++);
dateCreatedCell.CellStyle = dateStyle;
dateCreatedCell.SetCellValue(record.Created.DateTime);
row.CreateCell(col++).SetCellValue(record.Start.TotalMilliseconds);
if (record is IAnnotation annotation)
{
row.CreateCell(col++).SetCellValue(annotation.AnnotationId);
var lastModifiedCell = row.CreateCell(col++);
lastModifiedCell.CellStyle = dateStyle;
lastModifiedCell.SetCellValue(annotation.LastModified.DateTime);
if (annotation is IRangeAnnotation rangeAnnotation)
{
row.CreateCell(col++).SetCellValue(rangeAnnotation.End.TotalMilliseconds);
row.CreateCell(col++).SetCellValue(rangeAnnotation.Text);
if (rangeAnnotation is Clip clip)
row.CreateCell(col++).SetCellValue(clip.Title);
}
}
}
using var fileData = new System.IO.FileStream(saveFilePath, System.IO.FileMode.Create);
workbook.Write(fileData);
}
public static void ToJson(string saveFilePath, LibraryBook libraryBook, IEnumerable<IRecord> records)
{
if (!records.Any())
return;
var recordsEx = extendRecords(records);
var recordsObj = new JObject
{
{ "title", libraryBook.Book.TitleWithSubtitle},
{ "asin", libraryBook.Book.AudibleProductId},
{ "exportTime", DateTime.Now},
{ "records", JArray.FromObject(recordsEx) }
};
System.IO.File.WriteAllText(saveFilePath, recordsObj.ToString(Newtonsoft.Json.Formatting.Indented));
}
public static void ToCsv(string saveFilePath, IEnumerable<IRecord> records)
{
if (!records.Any())
return;
using var writer = new System.IO.StreamWriter(saveFilePath);
using var csv = new CsvWriter(writer, System.Globalization.CultureInfo.CurrentCulture);
//Write headers for the present record type that has the most properties
if (records.OfType<Clip>().Any())
csv.WriteHeader(typeof(ClipEx));
else if (records.OfType<Note>().Any())
csv.WriteHeader(typeof(NoteEx));
else if (records.OfType<Bookmark>().Any())
csv.WriteHeader(typeof(BookmarkEx));
else
csv.WriteHeader(typeof(LastHeardEx));
var recordsEx = extendRecords(records);
csv.NextRecord();
csv.WriteRecords(recordsEx.OfType<ClipEx>());
csv.WriteRecords(recordsEx.OfType<NoteEx>());
csv.WriteRecords(recordsEx.OfType<BookmarkEx>());
csv.WriteRecords(recordsEx.OfType<LastHeardEx>());
}
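// Example (illustrative sketch only; libraryBook and records are supplied by the caller, e.g. from a records download):
//
//   RecordExporter.ToXlsx(@"records.xlsx", records);
//   RecordExporter.ToCsv(@"records.csv", records);
//   RecordExporter.ToJson(@"records.json", libraryBook, records);
//
// Only ToJson needs the LibraryBook, because it embeds the book's title and asin alongside the records.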
private static IEnumerable<IRecordEx> extendRecords(IEnumerable<IRecord> records)
=> records
.Select<IRecord, IRecordEx>(
r => r switch
{
Clip c => new ClipEx(nameof(Clip), c),
Note n => new NoteEx(nameof(Note), n),
Bookmark b => new BookmarkEx(nameof(Bookmark), b),
LastHeard l => new LastHeardEx(nameof(LastHeard), l),
_ => throw new InvalidOperationException(),
});
private interface IRecordEx { string Type { get; } }
private record LastHeardEx : LastHeard, IRecordEx
{
public string Type { get; }
public LastHeardEx(string type, LastHeard original) : base(original)
{
Type = type;
}
}
private record BookmarkEx : Bookmark, IRecordEx
{
public string Type { get; }
public BookmarkEx(string type, Bookmark original) : base(original)
{
Type = type;
}
}
private record NoteEx : Note, IRecordEx
{
public string Type { get; }
public NoteEx(string type, Note original) : base(original)
{
Type = type;
}
}
private record ClipEx : Clip, IRecordEx
{
public string Type { get; }
public ClipEx(string type, Clip original) : base(original)
{
Type = type;
}
}
}
}

View File

@@ -34,7 +34,7 @@ namespace ApplicationServices
#region Update
private static bool isUpdating;
public static void UpdateBooks(IEnumerable<LibraryBook> books)
{
// Semi-arbitrary. At some point it's more worth it to do a full re-index than to do one offs.
// I did not benchmark before choosing the number here
@@ -43,20 +43,23 @@ namespace ApplicationServices
else
{
foreach (var book in books)
UpdateUserDefinedItems(book);
}
}
public static void FullReIndex() => performSafeCommand(fullReIndex);
public static void FullReIndex(List<LibraryBook> libraryBooks)
=> performSafeCommand(se => fullReIndex(se, libraryBooks.WithoutParents()));
internal static void UpdateUserDefinedItems(LibraryBook book) => performSafeCommand(e =>
{ {
e.UpdateLiberatedStatus(book);
e.UpdateTags(book.Book.AudibleProductId, book.Book.UserDefinedItem.Tags);
e.UpdateUserRatings(book);
});
private static void performSafeCommand(Action<SearchEngine> action)
@@ -84,6 +87,7 @@ namespace ApplicationServices
isUpdating = true;
action(new SearchEngine());
if (!prevIsUpdating)
SearchEngineUpdated?.Invoke(null, null);
}
@@ -96,11 +100,8 @@ namespace ApplicationServices
private static void fullReIndex(SearchEngine engine)
{
var library = DbContexts.GetLibrary_Flat_NoTracking();
fullReIndex(engine, library);
}
private static void fullReIndex(SearchEngine engine, IEnumerable<LibraryBook> libraryBooks)
=> engine.CreateNewIndex(libraryBooks);
#endregion
}
}

View File

@@ -47,22 +47,6 @@ namespace AudibleUtilities
update_no_validate();
}
}
private string _cdm;
[JsonProperty]
public string Cdm
{
get => _cdm;
set
{
if (value is null)
return;
_cdm = value;
update_no_validate();
}
}
[JsonIgnore]
public IReadOnlyList<Account> Accounts => _accounts_json.AsReadOnly();
#endregion

View File

@@ -1,71 +1,109 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading.Channels;
using System.Threading.Tasks;
using System.Diagnostics;
using AudibleApi;
using AudibleApi.Common;
using Dinah.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Polly;
using Polly.Retry;
using System.Threading;
#nullable enable
namespace AudibleUtilities
{
/// <summary>USE THIS from within Libation. It wraps the call with correct JSONPath</summary>
public class ApiExtended
{
public static Func<Account, ILoginChoiceEager>? LoginChoiceFactory { get; set; }
public Api Api { get; private set; }
private const int MaxConcurrency = 10;
private const int BatchSize = 50;
private ApiExtended(Api api) => Api = api;
/// <summary>Get api from existing tokens else login with 'eager' choice. External browser url is provided. Response can be external browser login or continuing with native api callbacks.</summary>
public static async Task<ApiExtended> CreateAsync(Account account, ILoginChoiceEager loginChoiceEager)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginChoiceEager),
Account = account?.MaskedLogEntry ?? "[null]",
LocaleName = account?.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
loginChoiceEager,
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
/// <summary>Get api from existing tokens else login with native api callbacks.</summary>
public static async Task<ApiExtended> CreateAsync(Account account, ILoginCallback loginCallback)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginCallback),
Account = account?.MaskedLogEntry ?? "[null]",
LocaleName = account?.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
loginCallback,
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
/// <summary>Get api from existing tokens else login with external browser</summary>
public static async Task<ApiExtended> CreateAsync(Account account, ILoginExternal loginExternal)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginExternal),
Account = account?.MaskedLogEntry ?? "[null]",
LocaleName = account?.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
loginExternal,
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
/// <summary>Get api from existing tokens. Assumes you have valid login tokens. Else exception</summary>
public static async Task<ApiExtended> CreateAsync(Account account)
{
ArgumentValidator.EnsureNotNull(account, nameof(account));
ArgumentValidator.EnsureNotNull(account.AccountId, nameof(account.AccountId));
ArgumentValidator.EnsureNotNull(account.Locale, nameof(account.Locale));
try
{
Serilog.Log.Logger.Information("{@DebugInfo}", new Serilog.Log.Logger.Information("{@DebugInfo}", new
{ {
AccountMaskedLogEntry = account.MaskedLogEntry AccountMaskedLogEntry = account.MaskedLogEntry
}); });
var api = await EzApiCreator.GetApiAsync( return await CreateAsync(account.AccountId, account.Locale.Name);
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
} }
catch
{
if (LoginChoiceFactory is null)
throw new InvalidOperationException($"The UI module must first set {nameof(LoginChoiceFactory)} before attempting to create the api");
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginChoiceEager),
Account = account.MaskedLogEntry ?? "[null]",
LocaleName = account.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
LoginChoiceFactory(account),
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
}
private static AsyncRetryPolicy policy { get; }
= Policy.Handle<Exception>()
@@ -83,152 +121,126 @@ namespace AudibleUtilities
private async Task<List<Item>> getItemsAsync(LibraryOptions libraryOptions, bool importEpisodes)
{
Serilog.Log.Logger.Debug("Beginning library scan.");
List<Item> items = new();
var sw = Stopwatch.StartNew();
var totalTime = TimeSpan.Zero;
using var semaphore = new SemaphoreSlim(MaxConcurrency);
var episodeChannel = Channel.CreateUnbounded<string>(new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
var batchReaderTask = readAllAsinsAsync(episodeChannel.Reader, semaphore);
//Scan the library for all added books.
//Get relationship asins from episode-type items and write them to episodeChannel where they will be batched and queried.
await foreach (var item in Api.GetLibraryItemsPagesAsync(libraryOptions, BatchSize, semaphore))
{
if (importEpisodes)
{
var episodes = item.Where(i => i.IsEpisodes).ToList();
var series = item.Where(i => i.IsSeriesParent).ToList();
var parentAsins = episodes
.SelectMany(i => i.Relationships)
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Parent)
.Select(r => r.Asin);
var episodeAsins = series
.SelectMany(i => i.Relationships)
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
.Select(r => r.Asin);
foreach (var asin in parentAsins.Concat(episodeAsins))
episodeChannel.Writer.TryWrite(asin);
items.AddRange(episodes);
items.AddRange(series);
}
items.AddRange(item.Where(i => !i.IsSeriesParent && !i.IsEpisodes));
}
sw.Stop();
totalTime += sw.Elapsed;
Serilog.Log.Logger.Debug("Library scan complete after {elapsed_ms} ms. Found {count} books and series. Waiting on series episode scans to complete.", sw.ElapsedMilliseconds, items.Count);
sw.Restart();
//Signal that we're done adding asins
episodeChannel.Writer.Complete();
//Wait for all episodes/parents to be retrieved
var allEps = await batchReaderTask;
sw.Stop();
totalTime += sw.Elapsed;
Serilog.Log.Logger.Debug("Episode scan complete after {elapsed_ms} ms. Found {count} episodes and series.", sw.ElapsedMilliseconds, allEps.Count);
sw.Restart();
Serilog.Log.Logger.Debug("Begin indexing series episodes");
items.AddRange(allEps);
//Set the Item.Series info for episodes and parents.
foreach (var parent in items.Where(i => i.IsSeriesParent))
{ {
var children = items.Where(i => i.IsEpisodes && i.Relationships.Any(r => r.Asin == parent.Asin));
SetSeries(parent, children);
}
int orphansRemoved = items.RemoveAll(i => (i.IsEpisodes || i.IsSeriesParent) && i.Series is null);
if (orphansRemoved > 0)
Serilog.Log.Debug("{orphansRemoved} podcast orphans not imported", orphansRemoved);
sw.Stop();
totalTime += sw.Elapsed;
Serilog.Log.Logger.Information("Completed indexing series episodes after {elappsed_ms} ms.", sw.ElapsedMilliseconds);
Serilog.Log.Logger.Information($"Completed library scan in {totalTime.TotalMilliseconds:F0} ms.");
var allExceptions = IValidator.GetAllValidators().SelectMany(v => v.Validate(items)).ToList();
if (allExceptions?.Count > 0)
throw new ImportValidationException(items, allExceptions);
return items;
}
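// The scan above is a single-producer/single-consumer pipeline: the library page loop writes episode and parent
// asins into an unbounded Channel while readAllAsinsAsync (below) drains them in batches of BatchSize.
// A minimal sketch of the same pattern (illustrative only, not Libation code):
//
//   var channel = Channel.CreateUnbounded<string>(new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
//   var consumer = Task.Run(async () =>
//   {
//       while (await channel.Reader.WaitToReadAsync())
//           Console.WriteLine(await channel.Reader.ReadAsync());
//   });
//   channel.Writer.TryWrite("B000000000"); // hypothetical asin
//   channel.Writer.Complete();
//   await consumer;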
private static List<IValidator> getValidators()
{
var type = typeof(IValidator);
var types = AppDomain.CurrentDomain.GetAssemblies()
.SelectMany(s => s.GetTypes())
.Where(p => type.IsAssignableFrom(p) && !p.IsInterface);
return types.Select(t => Activator.CreateInstance(t) as IValidator).ToList();
}
#region episodes and podcasts
/// <summary>
/// Read asins from the channel and request catalog item info in batches of <see cref="BatchSize"/>. Blocks until <paramref name="channelReader"/> is closed.
/// </summary>
/// <param name="channelReader">Input asins to batch</param>
/// <param name="semaphore">Shared semaphore to limit concurrency</param>
/// <returns>All <see cref="Item"/>s of asins written to the channel.</returns>
private async Task<List<Item>> readAllAsinsAsync(ChannelReader<string> channelReader, SemaphoreSlim semaphore)
{ {
int batchNum = 1;
List<Task<List<Item>>> getTasks = new();
while (await channelReader.WaitToReadAsync())
{
List<string> asins = new();
while (asins.Count < BatchSize && await channelReader.WaitToReadAsync())
{
var asin = await channelReader.ReadAsync();
if (!asins.Contains(asin))
asins.Add(asin);
}
await semaphore.WaitAsync();
getTasks.Add(getProductsAsync(batchNum++, asins, semaphore));
}
var completed = await Task.WhenAll(getTasks);
//We only want Series parents and Series episodes. Exclude other relationship types (e.g. 'season')
return completed.SelectMany(l => l).Where(i => i.IsSeriesParent || i.IsEpisodes).ToList();
}
private async Task<List<Item>> getProductsAsync(int batchNum, List<string> asins, SemaphoreSlim semaphore)
{
Serilog.Log.Logger.Debug($"Batch {batchNum} Begin: Fetching {asins.Count} asins");
try try
{ {
var sw = Stopwatch.StartNew();
var items = await Api.GetCatalogProductsAsync(asins, CatalogOptions.ResponseGroupOptions.Rating | CatalogOptions.ResponseGroupOptions.Media
| CatalogOptions.ResponseGroupOptions.Relationships | CatalogOptions.ResponseGroupOptions.ProductDesc
| CatalogOptions.ResponseGroupOptions.Contributors | CatalogOptions.ResponseGroupOptions.ProvidedReview
| CatalogOptions.ResponseGroupOptions.ProductPlans | CatalogOptions.ResponseGroupOptions.Series
| CatalogOptions.ResponseGroupOptions.CategoryLadders | CatalogOptions.ResponseGroupOptions.ProductExtendedAttrs);
sw.Stop();
Serilog.Log.Logger.Debug($"Batch {batchNum} End: Retrieved {items.Count} items in {sw.ElapsedMilliseconds} ms"); List<Item> children;
return items; if (parent.IsEpisodes)
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new { asins }); //The 'parent' is a single episode that was added to the library.
throw; //Get the episode's parent and add it to the database.
}
finally { semaphore.Release(); }
}
public static void SetSeries(Item parent, IEnumerable<Item> children)
{
ArgumentValidator.EnsureNotNull(parent, nameof(parent));
ArgumentValidator.EnsureNotNull(children, nameof(children));
//A series parent will always have exactly 1 Series
parent.Series = new[]
{
new Series
{
@@ -238,50 +250,97 @@ namespace AudibleUtilities
}
};
if (parent.PurchaseDate == default)
{
parent.PurchaseDate = children.Select(c => c.PurchaseDate).Order().FirstOrDefault(d => d != default);
if (parent.PurchaseDate == default)
{
Serilog.Log.Logger.Warning("{series} doesn't have a purchase date. Using UtcNow", parent);
parent.PurchaseDate = DateTimeOffset.UtcNow;
}
}
int lastEpNum = -1, dupeCount = 0;
foreach (var child in children.OrderBy(i => i.EpisodeNumber).ThenBy(i => i.PublicationDateTime))
{
string sequence;
if (child.EpisodeNumber is null)
{
// This should properly be Single() not FirstOrDefault(), but FirstOrDefault is defensive for malformed data from audible
sequence = parent.Relationships.FirstOrDefault(r => r.Asin == child.Asin)?.Sort?.ToString() ?? "0";
}
else
{
//multipart episodes may have the same episode number
if (child.EpisodeNumber == lastEpNum)
dupeCount++;
else
lastEpNum = child.EpisodeNumber.Value;
sequence = (lastEpNum + dupeCount).ToString();
}
// use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
child.PurchaseDate = parent.PurchaseDate;
// parent is essentially a series
child.Series = new[]
{
new Series
{
Asin = parent.Asin,
Sequence = sequence,
Title = parent.TitleWithSubtitle
}
};
}
}
private async Task<List<Item>> getEpisodeChildrenAsync(Item parent)
{
var childrenIds = parent.Relationships
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
.Select(r => r.Asin)
.ToList();
// fetch children in batches
const int batchSize = 20;
var results = new List<Item>();
for (var i = 1; ; i++)
{
var idBatch = childrenIds.Skip((i - 1) * batchSize).Take(batchSize).ToList();
if (!idBatch.Any())
break;
List<Item> childrenBatch;
try
{
childrenBatch = await Api.GetCatalogProductsAsync(idBatch, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
#if DEBUG
//var childrenBatchDebug = childrenBatch.Select(i => i.ToJson()).Aggregate((a, b) => $"{a}\r\n\r\n{b}");
//System.IO.File.WriteAllText($"children of {parent.Asin}.json", childrenBatchDebug);
#endif
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new
{
ParentId = parent.Asin,
ParentTitle = parent.Title,
BatchNumber = i,
ChildIdBatch = idBatch
});
throw;
}
Serilog.Log.Logger.Debug($"Batch {i}: {childrenBatch.Count} results\t({{parent}})", parent);
// the service returned no results. probably indicates an error. stop running batches
if (!childrenBatch.Any())
break;
results.AddRange(childrenBatch);
}
Serilog.Log.Logger.Debug("Parent episodes/podcasts series. Children found. {@DebugInfo}", new
{
ParentId = parent.Asin,
ParentTitle = parent.Title,
ChildCount = childrenIds.Count
});
if (childrenIds.Count != results.Count)
{
var ex = new ApplicationException($"Mis-match: Children defined by parent={childrenIds.Count}. Children returned by batches={results.Count}");
Serilog.Log.Logger.Error(ex, "{parent} - Quantity of series episodes defined by parent does not match quantity returned by batch fetching.", parent);
throw ex;
}
return results;
} }
#endregion #endregion
} }
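The episode scan above pulls child episodes from the catalog endpoint in pages of 20 ASINs. A minimal, hedged sketch of that paging pattern in isolation (the helper name and fetch delegate below are illustrative, not Libation APIs):
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
public static class BatchFetchSketch
{
	// Walk the id list in fixed-size pages; stop on an empty page or an empty result,
	// mirroring the Skip/Take loop in getEpisodeChildrenAsync above.
	public static async Task<List<T>> FetchAllAsync<T>(
		IReadOnlyList<string> ids,
		Func<IReadOnlyList<string>, Task<List<T>>> fetchBatchAsync,
		int batchSize = 20)
	{
		var results = new List<T>();
		for (var i = 1; ; i++)
		{
			var idBatch = ids.Skip((i - 1) * batchSize).Take(batchSize).ToList();
			if (!idBatch.Any())
				break;
			var batch = await fetchBatchAsync(idBatch);
			if (!batch.Any())
				break; // an empty result for a non-empty request likely indicates a service error
			results.AddRange(batch);
		}
		return results;
	}
}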

View File

@ -5,58 +5,19 @@ using Newtonsoft.Json;
namespace AudibleUtilities namespace AudibleUtilities
{ {
public class AccountSettingsLoadErrorEventArgs : ErrorEventArgs
{
/// <summary>
/// Create a new, empty <see cref="AccountsSettings"/> file if true, otherwise throw
/// </summary>
public bool Handled { get; set; }
/// <summary>
/// The file path of the AccountsSettings.json file
/// </summary>
public string SettingsFilePath { get; }
public AccountSettingsLoadErrorEventArgs(string path, Exception exception)
: base(exception)
{
SettingsFilePath = path;
}
}
public static class AudibleApiStorage public static class AudibleApiStorage
{ {
public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles, "AccountsSettings.json"); public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles, "AccountsSettings.json");
public static event EventHandler<AccountSettingsLoadErrorEventArgs> LoadError;
public static void EnsureAccountsSettingsFileExists() public static void EnsureAccountsSettingsFileExists()
{ {
// saves. BEWARE: this will overwrite an existing file // saves. BEWARE: this will overwrite an existing file
if (!File.Exists(AccountsSettingsFile)) if (!File.Exists(AccountsSettingsFile))
{
_ = new AccountsSettingsPersister(new AccountsSettings(), AccountsSettingsFile);
//Save the JSON file manually so that AccountsSettingsPersister.Saving and AccountsSettingsPersister.Saved
//are not fired. There's no need to fire those events on an empty AccountsSettings file.
var accountSerializerSettings = AudibleApi.Authorization.Identity.GetJsonSerializerSettings();
File.WriteAllText(AccountsSettingsFile, JsonConvert.SerializeObject(new AccountsSettings(), Formatting.Indented, accountSerializerSettings));
}
} }
/// <summary>If you use this, be a good citizen and DISPOSE of it</summary> /// <summary>If you use this, be a good citizen and DISPOSE of it</summary>
public static AccountsSettingsPersister GetAccountsSettingsPersister() public static AccountsSettingsPersister GetAccountsSettingsPersister() => new AccountsSettingsPersister(AccountsSettingsFile);
{
try
{
return new AccountsSettingsPersister(AccountsSettingsFile);
}
catch (Exception ex)
{
var args = new AccountSettingsLoadErrorEventArgs(AccountsSettingsFile, ex);
LoadError?.Invoke(null, args);
if (args.Handled)
return GetAccountsSettingsPersister();
throw;
}
}
public static string GetIdentityTokensJsonPath(this Account account) public static string GetIdentityTokensJsonPath(this Account account)
=> GetIdentityTokensJsonPath(account.AccountId, account.Locale?.Name); => GetIdentityTokensJsonPath(account.AccountId, account.Locale?.Name);

View File

@ -8,18 +8,7 @@ namespace AudibleUtilities
public interface IValidator public interface IValidator
{ {
IEnumerable<Exception> Validate(IEnumerable<Item> items); IEnumerable<Exception> Validate(IEnumerable<Item> items);
public static IValidator[] GetAllValidators()
=> new IValidator[]
{
new LibraryValidator(),
new BookValidator(),
new CategoryValidator(),
new ContributorValidator(),
new SeriesValidator(),
};
} }
public class LibraryValidator : IValidator public class LibraryValidator : IValidator
{ {
public IEnumerable<Exception> Validate(IEnumerable<Item> items) public IEnumerable<Exception> Validate(IEnumerable<Item> items)
@ -90,10 +79,8 @@ namespace AudibleUtilities
var distinct = items.GetSeriesDistinct(); var distinct = items.GetSeriesDistinct();
if (distinct.Any(s => s.SeriesId is null)) if (distinct.Any(s => s.SeriesId is null))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesId)}", nameof(items))); exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesId)}", nameof(items)));
//// unfortunately, a user has a series with no name
//if (distinct.Any(s => s.SeriesName is null))
// exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
if (distinct.Any(s => s.SeriesName is null))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
return exceptions; return exceptions;
} }

View File

@ -1,12 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net9.0</TargetFramework> <TargetFramework>net6.0</TargetFramework>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="AudibleApi" Version="9.4.5.1" /> <PackageReference Include="AudibleApi" Version="4.6.6.1" />
<PackageReference Include="Google.Protobuf" Version="3.32.0" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>
@ -21,9 +20,4 @@
<DebugType>embedded</DebugType> <DebugType>embedded</DebugType>
</PropertyGroup> </PropertyGroup>
<ItemGroup>
<Compile Update="Widevine\Cdm.*.cs">
<DependentUpon>Cdm.cs</DependentUpon>
</Compile>
</ItemGroup>
</Project> </Project>

View File

@ -1,15 +0,0 @@
using AudibleApi.Common;
using System;
using System.Collections.Generic;
namespace AudibleUtilities
{
public class ImportValidationException : AggregateException
{
public List<Item> Items { get; }
public ImportValidationException(List<Item> items, IEnumerable<Exception> exceptions) : base(exceptions)
{
Items = items;
}
}
}

View File

@ -4,7 +4,6 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using AudibleApi; using AudibleApi;
using AudibleApi.Authorization; using AudibleApi.Authorization;
using AudibleApi.Cryptography;
using Dinah.Core; using Dinah.Core;
using Newtonsoft.Json; using Newtonsoft.Json;
using Newtonsoft.Json.Linq; using Newtonsoft.Json.Linq;
@ -43,9 +42,6 @@ namespace AudibleUtilities
[JsonProperty("locale_code")] [JsonProperty("locale_code")]
public string LocaleCode { get; private set; } public string LocaleCode { get; private set; }
[JsonProperty("with_username")]
public bool WithUsername { get; private set; }
[JsonProperty("activation_bytes")] [JsonProperty("activation_bytes")]
public string ActivationBytes { get; private set; } public string ActivationBytes { get; private set; }
@ -71,8 +67,7 @@ namespace AudibleUtilities
} }
[JsonIgnore] public ISystemDateTime SystemDateTime { get; } = new SystemDateTime(); [JsonIgnore] public ISystemDateTime SystemDateTime { get; } = new SystemDateTime();
[JsonIgnore]
public Locale Locale => Localization.Locales.Where(l => l.WithUsername == WithUsername).Single(l => l.CountryCode == LocaleCode);
[JsonIgnore] public Locale Locale => Localization.Get(LocaleCode);
[JsonIgnore] public string DeviceSerialNumber => DeviceInfo.DeviceSerialNumber; [JsonIgnore] public string DeviceSerialNumber => DeviceInfo.DeviceSerialNumber;
[JsonIgnore] public string DeviceType => DeviceInfo.DeviceType; [JsonIgnore] public string DeviceType => DeviceInfo.DeviceType;
[JsonIgnore] public string AmazonAccountId => CustomerInfo.UserId; [JsonIgnore] public string AmazonAccountId => CustomerInfo.UserId;
@ -181,10 +176,9 @@ namespace AudibleUtilities
DevicePrivateKey = account.IdentityTokens.PrivateKey, DevicePrivateKey = account.IdentityTokens.PrivateKey,
AccessTokenExpires = account.IdentityTokens.ExistingAccessToken.Expires, AccessTokenExpires = account.IdentityTokens.ExistingAccessToken.Expires,
LocaleCode = account.Locale.CountryCode, LocaleCode = account.Locale.CountryCode,
WithUsername = account.Locale.WithUsername,
RefreshToken = account.IdentityTokens.RefreshToken.Value, RefreshToken = account.IdentityTokens.RefreshToken.Value,
StoreAuthenticationCookie = account.IdentityTokens.StoreAuthenticationCookie, StoreAuthenticationCookie = account.IdentityTokens.StoreAuthenticationCookie,
WebsiteCookies = new(account.IdentityTokens.Cookies), WebsiteCookies = new(account.IdentityTokens.Cookies.ToKeyValuePair()),
}; };
} }

View File

@ -1,189 +0,0 @@
using AudibleApi;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Net.Http;
using AudibleApi.Cryptography;
using Newtonsoft.Json.Linq;
using Dinah.Core.Net.Http;
using System.Text.Json.Nodes;
#nullable enable
namespace AudibleUtilities.Widevine;
public partial class Cdm
{
/// <summary>
/// Get a <see cref="Cdm"/> from <see cref="AccountsSettings"/> or from the API.
/// </summary>
/// <returns>A <see cref="Cdm"/> if successful, otherwise <see cref="null"/></returns>
public static async Task<Cdm?> GetCdmAsync()
{
using var persister = AudibleApiStorage.GetAccountsSettingsPersister();
//Check if there are any Android accounts. If not, we can't use Widevine.
if (!persister.Target.Accounts.Any(a => a.IdentityTokens.DeviceType == Resources.DeviceType))
return null;
if (!string.IsNullOrEmpty(persister.Target.Cdm))
{
try
{
var cdm = Convert.FromBase64String(persister.Target.Cdm);
return new Cdm(new Device(cdm));
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error loading CDM from account settings.");
persister.Target.Cdm = string.Empty;
//Clear the stored Cdm and try getting a fresh one from the server.
}
}
if (string.IsNullOrEmpty(persister.Target.Cdm))
{
using var client = new HttpClient();
if (await GetCdmUris(client) is not Uri[] uris)
return null;
//try to get a CDM file for any account that's registered as an android device.
//CDMs are not account-specific, so it doesn't matter which account we're successful with.
foreach (var account in persister.Target.Accounts.Where(a => a.IdentityTokens.DeviceType == Resources.DeviceType))
{
try
{
var requestMessage = CreateApiRequest(account);
await TestApiRequest(client, new JsonObject { { "body", requestMessage.ToString() } });
//Try all CDM URIs until a CDM has been retrieved successfully
foreach (var uri in uris)
{
try
{
var resp = await client.PostAsync(uri, ((HttpBody)requestMessage).Content);
if (!resp.IsSuccessStatusCode)
{
var message = await resp.Content.ReadAsStringAsync();
throw new ApiErrorException(uri, null, message);
}
var cdmBts = await resp.Content.ReadAsByteArrayAsync();
var device = new Device(cdmBts);
persister.Target.Cdm = Convert.ToBase64String(cdmBts);
return new Cdm(device);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error getting a CDM from URI: " + uri);
//try the next URI
}
}
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error getting a CDM for account: " + account.MaskedLogEntry);
//try the next Account
}
}
}
return null;
}
/// <summary>
/// Get a list of CDM API URIs from the main GitHub repository's .cdmurls.json file.
/// </summary>
/// <returns>If successful, an array of URIs to try. Otherwise null</returns>
private static async Task<Uri[]?> GetCdmUris(HttpClient httpClient)
{
const string CdmUrlListFile = "https://raw.githubusercontent.com/rmcrackan/Libation/refs/heads/master/.cdmurls.json";
try
{
var fileContents = await httpClient.GetStringAsync(CdmUrlListFile);
var releaseIndex = JObject.Parse(fileContents);
var urlArray = releaseIndex["CdmUrls"] as JArray;
if (urlArray is null)
throw new System.IO.InvalidDataException("CDM url list not found in JSON: " + fileContents);
var uris = urlArray.Select(u => u.Value<string>()).OfType<string>().Select(u => new Uri(u)).ToArray();
if (uris.Length == 0)
throw new System.IO.InvalidDataException("No CDM url found in JSON: " + fileContents);
return uris;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error getting CDM URLs");
return null;
}
}
static readonly string[] TLDs = ["com", "co.uk", "com.au", "com.br", "ca", "fr", "de", "in", "it", "co.jp", "es"];
//Ensure that the request can be made successfully before sending it to the API
//The API uses System.Text.Json, so perform test with same.
private static async Task TestApiRequest(HttpClient client, JsonObject input)
{
if (input["body"]?.GetValue<string>() is not string body
|| JsonNode.Parse(body) is not JsonNode bodyJson)
throw new Exception("Api request doesn't contain a body");
if (bodyJson?["Url"]?.GetValue<string>() is not string url
|| !Uri.TryCreate(url, UriKind.Absolute, out var uri))
throw new Exception("Api request doesn't contain a url");
if (!TLDs.Select(tld => "api.audible." + tld).Contains(uri.Host.ToLower()))
throw new Exception($"Unknown Audible Api domain: {uri.Host}");
if (bodyJson?["Headers"] is not JsonObject headers)
throw new Exception($"Api request doesn't contain any headers");
using var request = new HttpRequestMessage(HttpMethod.Get, uri);
Dictionary<string, string>? headersDict = null;
try
{
headersDict = System.Text.Json.JsonSerializer.Deserialize<Dictionary<string, string>>(headers);
}
catch (Exception ex)
{
throw new Exception("Failed to read Audible Api headers.", ex);
}
if (headersDict is null)
throw new Exception("Failed to read Audible Api headers.");
foreach (var kvp in headersDict)
request.Headers.Add(kvp.Key, kvp.Value);
using var resp = await client.SendAsync(request);
resp.EnsureSuccessStatusCode();
}
/// <summary>
/// Create a request body to send to the API
/// </summary>
/// <param name="account">An authenticated account</param>
private static JObject CreateApiRequest(Account account)
{
const string ACCOUNT_INFO_PATH = "/1.0/account/information";
var message = new HttpRequestMessage(HttpMethod.Get, ACCOUNT_INFO_PATH);
message.SignRequest(
DateTime.UtcNow,
account.IdentityTokens.AdpToken,
account.IdentityTokens.PrivateKey);
return new JObject
{
{ "Url", new Uri(account.Locale.AudibleApiUri(), ACCOUNT_INFO_PATH) },
{ "Headers", JObject.FromObject(message.Headers.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Single())) }
};
}
}

View File

@ -1,300 +0,0 @@
using Google.Protobuf;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
#nullable enable
namespace AudibleUtilities.Widevine;
public enum KeyType
{
/// <summary>
/// Exactly one key of this type must appear.
/// </summary>
Signing = 1,
/// <summary>
/// Content key.
/// </summary>
Content = 2,
/// <summary>
/// Key control block for license renewals. No key.
/// </summary>
KeyControl = 3,
/// <summary>
/// wrapped keys for auxiliary crypto operations.
/// </summary>
OperatorSession = 4,
/// <summary>
/// Entitlement keys.
/// </summary>
Entitlement = 5,
/// <summary>
/// Partner-specific content key.
/// </summary>
OemContent = 6,
}
public interface ISession : IDisposable
{
string? GetLicenseChallenge(MpegDash dash);
WidevineKey[] ParseLicense(string licenseMessage);
}
public class WidevineKey
{
public Guid Kid { get; }
public KeyType Type { get; }
public byte[] Key { get; }
internal WidevineKey(Guid kid, License.Types.KeyContainer.Types.KeyType type, byte[] key)
{
Kid = kid;
Type = (KeyType)type;
Key = key;
}
public override string ToString() => $"{Convert.ToHexString(Kid.ToByteArray(bigEndian: true)).ToLower()}:{Convert.ToHexString(Key).ToLower()}";
}
public partial class Cdm
{
public static Guid WidevineContentProtection { get; } = new("edef8ba9-79d6-4ace-a3c8-27dcd51d21ed");
private const int MAX_NUM_OF_SESSIONS = 16;
internal Device Device { get; }
private ConcurrentDictionary<Guid, Session> Sessions { get; } = new(-1, MAX_NUM_OF_SESSIONS);
internal Cdm(Device device)
{
Device = device;
}
public ISession OpenSession()
{
if (Sessions.Count == MAX_NUM_OF_SESSIONS)
throw new Exception("Too Many Sessions");
var session = new Session(Sessions.Count + 1, this);
var ddd = Sessions.TryAdd(session.Id, session);
return session;
}
#region Session
internal class Session : ISession
{
public Guid Id { get; } = Guid.NewGuid();
private int SessionNumber { get; }
private Cdm Cdm { get; }
private byte[]? EncryptionContext { get; set; }
private byte[]? AuthenticationContext { get; set; }
public Session(int number, Cdm cdm)
{
SessionNumber = number;
Cdm = cdm;
}
private string GetRequestId()
=> $"{RandomUint():x8}00000000{Convert.ToHexString(BitConverter.GetBytes((long)SessionNumber)).ToLowerInvariant()}";
public void Dispose()
{
if (Cdm.Sessions.ContainsKey(Id))
Cdm.Sessions.TryRemove(Id, out var session);
}
public string? GetLicenseChallenge(MpegDash dash)
{
if (!dash.TryGetPssh(Cdm.WidevineContentProtection, out var pssh))
return null;
var licRequest = new LicenseRequest
{
ClientId = Cdm.Device.ClientId,
ContentId = new()
{
WidevinePsshData = new()
{
LicenseType = LicenseType.Offline,
RequestId = ByteString.CopyFrom(GetRequestId(), Encoding.ASCII)
}
},
Type = LicenseRequest.Types.RequestType.New,
RequestTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
ProtocolVersion = ProtocolVersion.Version21,
KeyControlNonce = RandomUint()
};
licRequest.ContentId.WidevinePsshData.PsshData.Add(ByteString.CopyFrom(pssh.InitData));
var licRequestBts = licRequest.ToByteArray();
EncryptionContext = CreateContext("ENCRYPTION", 128, licRequestBts);
AuthenticationContext = CreateContext("AUTHENTICATION", 512, licRequestBts);
var signedMessage = new SignedMessage
{
Type = SignedMessage.Types.MessageType.LicenseRequest,
Msg = ByteString.CopyFrom(licRequestBts),
Signature = ByteString.CopyFrom(Cdm.Device.SignMessage(licRequestBts))
};
return Convert.ToBase64String(signedMessage.ToByteArray());
}
public WidevineKey[] ParseLicense(string licenseMessage)
{
if (EncryptionContext is null || AuthenticationContext is null)
throw new InvalidOperationException($"{nameof(GetLicenseChallenge)}() must be called before calling {nameof(ParseLicense)}()");
var signedMessage = SignedMessage.Parser.ParseFrom(Convert.FromBase64String(licenseMessage));
if (signedMessage.Type != SignedMessage.Types.MessageType.License)
throw new InvalidDataException("Invalid license");
var sessionKey = Cdm.Device.DecryptSessionKey(signedMessage.SessionKey.ToByteArray());
if (!VerifySignature(signedMessage, AuthenticationContext, sessionKey))
throw new InvalidDataException("Message signature is invalid");
var license = License.Parser.ParseFrom(signedMessage.Msg);
var keyToTheKeys = DeriveKey(sessionKey, EncryptionContext, 1);
return DecryptKeys(keyToTheKeys, license.Key);
}
private static WidevineKey[] DecryptKeys(byte[] keyToTheKeys, IList<License.Types.KeyContainer> licenseKeys)
{
using var aes = Aes.Create();
aes.Key = keyToTheKeys;
var keys = new WidevineKey[licenseKeys.Count];
for (int i = 0; i < licenseKeys.Count; i++)
{
var keyContainer = licenseKeys[i];
var keyBytes = aes.DecryptCbc(keyContainer.Key.ToByteArray(), keyContainer.Iv.ToByteArray(), PaddingMode.PKCS7);
var id = keyContainer.Id.ToByteArray();
if (id.Length > 16)
{
var tryB64 = new byte[id.Length * 3 / 4];
if (Convert.TryFromBase64String(Encoding.ASCII.GetString(id), tryB64, out int bytesWritten))
{
id = tryB64;
}
Array.Resize(ref id, 16);
}
else if (id.Length < 16)
{
id = id.Append(new byte[16 - id.Length]);
}
keys[i] = new WidevineKey(new Guid(id,bigEndian: true), keyContainer.Type, keyBytes);
}
return keys;
}
private static bool VerifySignature(SignedMessage signedMessage, byte[] authContext, byte[] sessionKey)
{
var mac_key_server = DeriveKey(sessionKey, authContext, 1).Append(DeriveKey(sessionKey, authContext, 2));
var hmacData = (signedMessage.OemcryptoCoreMessage?.ToByteArray() ?? []).Append(signedMessage.Msg?.ToByteArray() ?? []);
var computed_signature = HMACSHA256.HashData(mac_key_server, hmacData);
return computed_signature.SequenceEqual(signedMessage.Signature);
}
private static byte[] DeriveKey(byte[] session_key, byte[] context, int counter)
{
var data = new byte[context.Length + 1];
Array.Copy(context, 0, data, 1, context.Length);
data[0] = (byte)counter;
return AESCMAC(session_key, data);
}
private static byte[] AESCMAC(byte[] key, byte[] data)
{
using var aes = Aes.Create();
aes.Key = key;
// SubKey generation
// step 1, AES-128 with key K is applied to an all-zero input block.
byte[] subKey = aes.EncryptCbc(new byte[16], new byte[16], PaddingMode.None);
nextSubKey();
// MAC computing
if ((data.Length == 0) || (data.Length % 16 != 0))
{
// If the size of the input message block is not equal to a positive
// multiple of the block size (namely, 128 bits), the last block shall
// be padded with 10^i
nextSubKey();
var padLen = 16 - data.Length % 16;
Array.Resize(ref data, data.Length + padLen);
data[^padLen] = 0x80;
}
// the last block shall be exclusive-OR'ed with K1 before processing
for (int j = 0; j < subKey.Length; j++)
data[data.Length - 16 + j] ^= subKey[j];
// The result of the previous process will be the input of the last encryption.
byte[] encResult = aes.EncryptCbc(data, new byte[16], PaddingMode.None);
byte[] HashValue = new byte[16];
Array.Copy(encResult, encResult.Length - HashValue.Length, HashValue, 0, HashValue.Length);
return HashValue;
void nextSubKey()
{
const byte const_Rb = 0x87;
if (Rol(subKey) != 0)
subKey[15] ^= const_Rb;
static int Rol(byte[] b)
{
int carry = 0;
for (int i = b.Length - 1; i >= 0; i--)
{
ushort u = (ushort)(b[i] << 1);
b[i] = (byte)((u & 0xff) + carry);
carry = (u & 0xff00) >> 8;
}
return carry;
}
}
}
private static byte[] CreateContext(string label, int keySize, byte[] licRequestBts)
{
var contextSize = label.Length + 1 + licRequestBts.Length + sizeof(int);
var context = new byte[contextSize];
var numChars = Encoding.ASCII.GetBytes(label.AsSpan(), context);
Array.Copy(licRequestBts, 0, context, numChars + 1, licRequestBts.Length);
var numBts = BitConverter.GetBytes(keySize);
if (BitConverter.IsLittleEndian)
Array.Reverse(numBts);
Array.Copy(numBts, 0, context, context.Length - sizeof(int), sizeof(int));
return context;
}
private static uint RandomUint()
{
var bts = new byte[4];
new Random().NextBytes(bts);
return BitConverter.ToUInt32(bts, 0);
}
}
#endregion
}
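Read together, GetCdmAsync, OpenSession, GetLicenseChallenge, and ParseLicense imply a simple challenge/response flow. A hedged usage sketch; the MPD path and the transport that posts the challenge to Audible's license endpoint are assumptions, not code from this file:
using System;
using System.Threading.Tasks;
using AudibleUtilities.Widevine;
using var mpdStream = System.IO.File.OpenRead("content.mpd");      // placeholder path
var dash = new MpegDash(mpdStream);
var cdm = await Cdm.GetCdmAsync();                                  // null when no Android-registered account or CDM is available
if (cdm is not null)
{
	using var session = cdm.OpenSession();
	var challenge = session.GetLicenseChallenge(dash);             // null when the MPD carries no Widevine pssh
	if (challenge is not null)
	{
		var licenseResponse = await PostChallengeAsync(challenge); // hypothetical transport, see stub below
		foreach (var key in session.ParseLicense(licenseResponse))
			Console.WriteLine(key);                                // prints "kid:key" in lowercase hex
	}
}
// Hypothetical stub; the real request to Audible's license endpoint is not part of this class.
static Task<string> PostChallengeAsync(string challenge)
	=> throw new NotImplementedException();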

View File

@ -1,155 +0,0 @@
using System;
using System.IO;
using System.Numerics;
using System.Security.Cryptography;
#nullable enable
namespace AudibleUtilities.Widevine;
internal enum DeviceTypes : byte
{
Unknown = 0,
Chrome = 1,
Android = 2
}
internal class Device
{
public DeviceTypes Type { get; }
public int FileVersion { get; }
public int SecurityLevel { get; }
public int Flags { get; }
public RSA CdmKey { get; }
internal ClientIdentification ClientId { get; }
public Device(Span<byte> fileData)
{
if (fileData.Length < 7 || fileData[0] != 'W' || fileData[1] != 'V' || fileData[2] != 'D')
throw new InvalidDataException();
FileVersion = fileData[3];
Type = (DeviceTypes)fileData[4];
SecurityLevel = fileData[5];
Flags = fileData[6];
if (FileVersion != 2)
throw new InvalidDataException($"Unknown CDM File Version: '{FileVersion}'");
if (Type != DeviceTypes.Android)
throw new InvalidDataException($"Unknown CDM Type: '{Type}'");
if (SecurityLevel != 3)
throw new InvalidDataException($"Unknown CDM Security Level: '{SecurityLevel}'");
var privateKeyLength = (fileData[7] << 8) | fileData[8];
if (privateKeyLength <= 0 || fileData.Length < 9 + privateKeyLength + 2)
throw new InvalidDataException($"Invalid private key length: '{privateKeyLength}'");
var clientIdLength = (fileData[9 + privateKeyLength] << 8) | fileData[10 + privateKeyLength];
if (clientIdLength <= 0 || fileData.Length < 11 + privateKeyLength + clientIdLength)
throw new InvalidDataException($"Invalid client id length: '{clientIdLength}'");
ClientId = ClientIdentification.Parser.ParseFrom(fileData.Slice(11 + privateKeyLength));
CdmKey = RSA.Create();
CdmKey.ImportRSAPrivateKey(fileData.Slice(9, privateKeyLength), out _);
}
public byte[] SignMessage(byte[] message)
{
var digestion = SHA1.HashData(message);
return PssSha1Signer.SignHash(CdmKey, digestion);
}
public bool VerifyMessage(byte[] message, byte[] signature)
{
var digestion = SHA1.HashData(message);
return CdmKey.VerifyHash(digestion, signature, HashAlgorithmName.SHA1, RSASignaturePadding.Pss);
}
public byte[] DecryptSessionKey(byte[] sessionKey)
=> CdmKey.Decrypt(sessionKey, RSAEncryptionPadding.OaepSHA1);
/// <summary>
/// Completely managed implementation of RSASSA-PSS using SHA-1.
/// https://github.com/bcgit/bc-csharp/blob/master/crypto/src/crypto/signers/PssSigner.cs
///
/// Absolutely nobody anywhere should use this RSASSA-PSS implementation in anything where they care about security at all. We completely skipped the random salt part of it because libation doesn't need security; it only needs to satisfy Audible server's challenge-response requirements.
/// </summary>
private static class PssSha1Signer
{
private const int Sha1DigestSize = 20;
private const int Trailer = 0xBC;
public static byte[] SignHash(RSA rsa, ReadOnlySpan<byte> hash)
{
ArgumentOutOfRangeException.ThrowIfNotEqual(hash.Length, Sha1DigestSize);
var parameters = rsa.ExportParameters(true);
var Modulus = new BigInteger(parameters.Modulus, isUnsigned: true, isBigEndian: true);
var Exponent = new BigInteger(parameters.D, isUnsigned: true, isBigEndian: true);
var emBits = rsa.KeySize - 1;
var block = new byte[(emBits + 7) / 8];
var firstByteMask = (byte)(0xFFU >> ((block.Length * 8) - emBits));
Span<byte> mDash = new byte[8 + 2 * Sha1DigestSize];
hash.CopyTo(mDash.Slice(8));
var h = SHA1.HashData(mDash);
block[^(2 * (Sha1DigestSize + 1))] = 1;
byte[] dbMask = MaskGeneratorFunction1(h, 0, h.Length, block.Length - Sha1DigestSize - 1);
for (int i = 0; i != dbMask.Length; i++)
block[i] ^= dbMask[i];
h.CopyTo(block, block.Length - Sha1DigestSize - 1);
block[0] &= firstByteMask;
block[^1] = Trailer;
var input = new BigInteger(block, isUnsigned: true, isBigEndian: true);
var result = BigInteger.ModPow(input, Exponent, Modulus);
return result.ToByteArray(isUnsigned: true, isBigEndian: true);
}
private static byte[] MaskGeneratorFunction1(byte[] Z, int zOff, int zLen, int length)
{
byte[] mask = new byte[length];
byte[] hashBuf = new byte[Sha1DigestSize];
byte[] C = new byte[4];
int counter = 0;
using var sha = SHA1.Create();
for (; counter < (length / Sha1DigestSize); counter++)
{
ItoOSP(counter, C);
sha.TransformBlock(Z, zOff, zLen, null, 0);
sha.TransformFinalBlock(C, 0, C.Length);
sha.Hash!.CopyTo(mask, counter * Sha1DigestSize);
}
if ((counter * Sha1DigestSize) < length)
{
ItoOSP(counter, C);
sha.TransformBlock(Z, zOff, zLen, null, 0);
sha.TransformFinalBlock(C, 0, C.Length);
Array.Copy(sha.Hash!, 0, mask, counter * Sha1DigestSize, mask.Length - (counter * Sha1DigestSize));
}
return mask;
}
private static void ItoOSP(int i, byte[] sp)
{
sp[0] = (byte)((uint)i >> 24);
sp[1] = (byte)((uint)i >> 16);
sp[2] = (byte)((uint)i >> 8);
sp[3] = (byte)((uint)i >> 0);
}
}
}

View File

@ -1,15 +0,0 @@
using System;
#nullable enable
namespace AudibleUtilities.Widevine;
internal static class Extensions
{
public static T[] Append<T>(this T[] message, T[] appendData)
{
var origLength = message.Length;
Array.Resize(ref message, origLength + appendData.Length);
Array.Copy(appendData, 0, message, origLength, appendData.Length);
return message;
}
}
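For orientation, the Append helper above allocates a new array rather than mutating either input; a tiny hedged sketch:
byte[] head = { 0x01, 0x02 };
byte[] tail = { 0x03, 0x04 };
// joined is a new array { 0x01, 0x02, 0x03, 0x04 }; head and tail are left unchanged.
byte[] joined = head.Append(tail);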

File diff suppressed because it is too large Load Diff

View File

@ -1,70 +0,0 @@
using Mpeg4Lib.Boxes;
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Xml;
using System.Xml.Linq;
using System.Xml.XPath;
#nullable enable
namespace AudibleUtilities.Widevine;
public class MpegDash
{
private const string MpegDashNamespace = "urn:mpeg:dash:schema:mpd:2011";
private const string CencNamespace = "urn:mpeg:cenc:2013";
private const string UuidPreamble = "urn:uuid:";
private XElement DashMpd { get; }
private static XmlNamespaceManager NamespaceManager { get; } = new(new NameTable());
static MpegDash()
{
NamespaceManager.AddNamespace("dash", MpegDashNamespace);
NamespaceManager.AddNamespace("cenc", CencNamespace);
}
public MpegDash(Stream contents)
{
DashMpd = XElement.Load(contents);
}
public bool TryGetUri(Uri baseUri, [NotNullWhen(true)] out Uri? fileUri)
{
foreach (var baseUrl in DashMpd.XPathSelectElements("/dash:Period/dash:AdaptationSet/dash:Representation/dash:BaseURL", NamespaceManager))
{
try
{
fileUri = new Uri(baseUri, baseUrl.Value);
return true;
}
catch
{
fileUri = null;
return false;
}
}
fileUri = null;
return false;
}
public bool TryGetPssh(Guid protectionSystemId, [NotNullWhen(true)] out PsshBox? pssh)
{
foreach (var psshEle in DashMpd.XPathSelectElements("/dash:Period/dash:AdaptationSet/dash:ContentProtection/cenc:pssh", NamespaceManager))
{
if (psshEle?.Value?.Trim() is string psshStr
&& psshEle.Parent?.Attribute(XName.Get("schemeIdUri")) is XAttribute scheme
&& scheme.Value is string uuid
&& uuid.Equals(UuidPreamble + protectionSystemId.ToString(), StringComparison.OrdinalIgnoreCase))
{
Span<byte> buffer = new byte[psshStr.Length * 3 / 4];
if (Convert.TryFromBase64String(psshStr, buffer, out var written))
{
using var ms = new MemoryStream(buffer.Slice(0, written).ToArray());
pssh = BoxFactory.CreateBox(ms, null) as PsshBox;
return pssh is not null;
}
}
}
pssh = null;
return false;
}
}
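A short, hedged sketch of the two lookups above; the file name and base URI are placeholders, and PsshBox.InitData is used the same way Cdm.GetLicenseChallenge uses it:
using System;
using AudibleUtilities.Widevine;
using var mpd = System.IO.File.OpenRead("manifest.mpd");
var dash = new MpegDash(mpd);
// Resolve the first <BaseURL> element against the location the manifest was downloaded from.
if (dash.TryGetUri(new Uri("https://example.invalid/content/manifest.mpd"), out var mediaUri))
	Console.WriteLine($"media url: {mediaUri}");
// Look for a Widevine pssh entry among the cenc:pssh ContentProtection elements.
if (dash.TryGetPssh(Cdm.WidevineContentProtection, out var pssh))
	Console.WriteLine($"Widevine init data: {pssh.InitData.Length} bytes");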

View File

@ -1,70 +0,0 @@
#nullable enable
using Newtonsoft.Json;
namespace DataLayer;
public enum Codec : byte
{
Unknown,
Mp3,
AAC_LC,
xHE_AAC,
EC_3,
AC_4
}
public class AudioFormat
{
public static AudioFormat Default => new(Codec.Unknown, 0, 0, 0);
[JsonIgnore]
public bool IsDefault => Codec is Codec.Unknown && BitRate == 0 && SampleRate == 0 && ChannelCount == 0;
[JsonIgnore]
public Codec Codec { get; set; }
public int SampleRate { get; set; }
public int ChannelCount { get; set; }
public int BitRate { get; set; }
public AudioFormat(Codec codec, int bitRate, int sampleRate, int channelCount)
{
Codec = codec;
BitRate = bitRate;
SampleRate = sampleRate;
ChannelCount = channelCount;
}
public string CodecString => Codec switch
{
Codec.Mp3 => "mp3",
Codec.AAC_LC => "AAC-LC",
Codec.xHE_AAC => "xHE-AAC",
Codec.EC_3 => "EC-3",
Codec.AC_4 => "AC-4",
Codec.Unknown or _ => "[Unknown]",
};
//Property | Start | Num | Max | Current Max |
// | Bit | Bits | Value | Value Used |
//-----------------------------------------------------
//Codec | 35 | 4 | 15 | 5 |
//BitRate | 23 | 12 | 4_095 | 768 |
//SampleRate | 5 | 18 | 262_143 | 48_000 |
//ChannelCount | 0 | 5 | 31 | 6 |
public long Serialize() =>
((long)Codec << 35) |
((long)BitRate << 23) |
((long)SampleRate << 5) |
(long)ChannelCount;
public static AudioFormat Deserialize(long value)
{
var codec = (Codec)((value >> 35) & 15);
var bitRate = (int)((value >> 23) & 4_095);
var sampleRate = (int)((value >> 5) & 262_143);
var channelCount = (int)(value & 31);
return new AudioFormat(codec, bitRate, sampleRate, channelCount);
}
public override string ToString()
=> IsDefault ? "[Unknown Audio Format]"
: $"{CodecString} ({ChannelCount}ch | {SampleRate:N0}Hz | {BitRate}kbps)";
}
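To make the bit layout above concrete, a small hedged round-trip sketch (the values are arbitrary but fit the documented field widths):
var format = new AudioFormat(Codec.xHE_AAC, bitRate: 64, sampleRate: 44100, channelCount: 2);
// Codec occupies bits 35+, BitRate bits 23-34, SampleRate bits 5-22, ChannelCount bits 0-4.
long packed = format.Serialize();
var restored = AudioFormat.Deserialize(packed);
// restored.Codec == Codec.xHE_AAC, restored.BitRate == 64,
// restored.SampleRate == 44100, restored.ChannelCount == 2
// restored.ToString() renders roughly "xHE-AAC (2ch | 44,100Hz | 64kbps)"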

View File

@ -1,26 +0,0 @@
using Microsoft.EntityFrameworkCore.Metadata.Builders;
using Microsoft.EntityFrameworkCore;
namespace DataLayer.Configurations
{
internal class BookCategoryConfig : IEntityTypeConfiguration<BookCategory>
{
public void Configure(EntityTypeBuilder<BookCategory> entity)
{
entity.HasKey(bc => new { bc.BookId, bc.CategoryLadderId });
entity.HasIndex(bc => bc.BookId);
entity.HasIndex(bc => bc.CategoryLadderId);
entity
.HasOne(bc => bc.Book)
.WithMany(b => b.CategoriesLink)
.HasForeignKey(bc => bc.BookId);
entity
.HasOne(bc => bc.CategoryLadder)
.WithMany(c => c.BooksLink)
.HasForeignKey(bc => bc.CategoryLadderId);
}
}
}

View File

@ -1,6 +1,5 @@
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders; using Microsoft.EntityFrameworkCore.Metadata.Builders;
using System;
namespace DataLayer.Configurations namespace DataLayer.Configurations
{ {
@ -13,13 +12,17 @@ namespace DataLayer.Configurations
entity.OwnsOne(b => b.Rating); entity.OwnsOne(b => b.Rating);
entity.Property(nameof(Book._audioFormat));
// //
// CRUCIAL: ignore unmapped collections, even get-only // CRUCIAL: ignore unmapped collections, even get-only
// //
entity.Ignore(nameof(Book.Authors)); entity.Ignore(nameof(Book.Authors));
entity.Ignore(nameof(Book.Narrators)); entity.Ignore(nameof(Book.Narrators));
entity.Ignore(nameof(Book.TitleWithSubtitle)); entity.Ignore(nameof(Book.AudioFormat));
entity.Ignore(b => b.Categories);
//// these don't seem to matter
//entity.Ignore(nameof(Book.AuthorNames));
//entity.Ignore(nameof(Book.NarratorNames));
//entity.Ignore(nameof(Book.HasPdfs));
// OwnsMany: "Can only ever appear on navigation properties of other entity types. // OwnsMany: "Can only ever appear on navigation properties of other entity types.
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner." // Are automatically loaded, and can only be tracked by a DbContext alongside their owner."
@ -45,19 +48,26 @@ namespace DataLayer.Configurations
b_udi.Property(udi => udi.BookId).ValueGeneratedNever(); b_udi.Property(udi => udi.BookId).ValueGeneratedNever();
b_udi.ToTable(nameof(Book.UserDefinedItem)); b_udi.ToTable(nameof(Book.UserDefinedItem));
b_udi.Property(udi => udi.LastDownloaded);
b_udi
.Property(udi => udi.LastDownloadedVersion)
.HasConversion(ver => ver.ToString(), str => Version.Parse(str));
b_udi
.Property(udi => udi.LastDownloadedFormat)
.HasConversion(af => af.Serialize(), str => AudioFormat.Deserialize(str));
b_udi.Property(udi => udi.LastDownloadedFileVersion);
// owns it 1:1, store in same table // owns it 1:1, store in same table
b_udi.OwnsOne(udi => udi.Rating); b_udi.OwnsOne(udi => udi.Rating);
}); });
entity
.Metadata
.FindNavigation(nameof(Book.ContributorsLink))
// PropertyAccessMode.Field : Contributions is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.Metadata
.FindNavigation(nameof(Book.SeriesLink))
// PropertyAccessMode.Field : Series is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.HasOne(b => b.Category)
.WithMany()
.HasForeignKey(b => b.CategoryId);
} }
} }
} }

View File

@ -9,12 +9,6 @@ namespace DataLayer.Configurations
{ {
entity.HasKey(c => c.CategoryId); entity.HasKey(c => c.CategoryId);
entity.HasIndex(c => c.AudibleCategoryId); entity.HasIndex(c => c.AudibleCategoryId);
entity.Ignore(c => c.CategoryLadders);
entity
.HasMany(e => e._categoryLadders)
.WithMany(e => e._categories);
} }
} }
} }

View File

@ -1,24 +0,0 @@
using Microsoft.EntityFrameworkCore.Metadata.Builders;
using Microsoft.EntityFrameworkCore;
namespace DataLayer.Configurations
{
internal class CategoryLadderConfig : IEntityTypeConfiguration<CategoryLadder>
{
public void Configure(EntityTypeBuilder<CategoryLadder> entity)
{
entity.HasKey(cl => cl.CategoryLadderId);
entity.Ignore(cl => cl.Categories);
entity
.HasMany(cl => cl._categories)
.WithMany(c => c._categoryLadders);
entity
.Metadata
.FindNavigation(nameof(CategoryLadder.BooksLink))
.SetPropertyAccessMode(PropertyAccessMode.Field);
}
}
}

View File

@ -17,9 +17,6 @@ namespace DataLayer.Configurations
.Metadata .Metadata
.FindNavigation(nameof(Contributor.BooksLink)) .FindNavigation(nameof(Contributor.BooksLink))
.SetPropertyAccessMode(PropertyAccessMode.Field); .SetPropertyAccessMode(PropertyAccessMode.Field);
// seeds go here. examples in Dinah.EntityFrameworkCore.Tests\DbContextFactoryExample.cs
entity.HasData(Contributor.GetEmpty());
} }
} }
} }

View File

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net9.0</TargetFramework> <TargetFramework>net6.0</TargetFramework>
</PropertyGroup> </PropertyGroup>
<PropertyGroup> <PropertyGroup>
@ -10,14 +10,14 @@
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Dinah.Core" Version="9.0.3.1" /> <PackageReference Include="Dinah.Core" Version="5.3.0.1" />
<PackageReference Include="Dinah.EntityFrameworkCore" Version="9.0.0.1" /> <PackageReference Include="Dinah.EntityFrameworkCore" Version="5.0.2.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.8"> <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.10">
<PrivateAssets>all</PrivateAssets> <PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference> </PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="9.0.8" /> <PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="6.0.10" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="9.0.8"> <PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="6.0.10">
<PrivateAssets>all</PrivateAssets> <PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference> </PackageReference>

View File

@ -0,0 +1,61 @@
using System;
namespace DataLayer
{
internal enum AudioFormatEnum : long
{
//Defining the enum this way ensures that when comparing:
//LC_128_44100_stereo > LC_64_44100_stereo > LC_64_22050_stereo > LC_32_22050_stereo
//This matches how audible interprets these codecs when specifying quality using AudibleApi.DownloadQuality
//I've never seen mono formats.
Unknown = 0,
LC_32_22050_stereo = (32L << 18) | (22050 << 2) | 2,
LC_64_22050_stereo = (64L << 18) | (22050 << 2) | 2,
LC_64_44100_stereo = (64L << 18) | (44100 << 2) | 2,
LC_128_44100_stereo = (128L << 18) | (44100 << 2) | 2,
}
public class AudioFormat : IComparable<AudioFormat>, IComparable
{
internal int AudioFormatID { get; private set; }
public int Bitrate { get; private init; }
public int SampleRate { get; private init; }
public int Channels { get; private init; }
public bool IsValid => Bitrate != 0 && SampleRate != 0 && Channels != 0;
public static AudioFormat FromString(string formatStr)
{
if (Enum.TryParse(formatStr, ignoreCase: true, out AudioFormatEnum enumVal))
return FromEnum(enumVal);
return FromEnum(AudioFormatEnum.Unknown);
}
internal static AudioFormat FromEnum(AudioFormatEnum enumVal)
{
var val = (long)enumVal;
return new()
{
Bitrate = (int)(val >> 18),
SampleRate = (int)(val >> 2) & ushort.MaxValue,
Channels = (int)(val & 3)
};
}
internal AudioFormatEnum ToEnum()
{
var val = (AudioFormatEnum)(((long)Bitrate << 18) | ((long)SampleRate << 2) | (long)Channels);
return Enum.IsDefined(val) ?
val : AudioFormatEnum.Unknown;
}
public override string ToString()
=> IsValid ?
$"{Bitrate} Kbps, {SampleRate / 1000d:F1} kHz, {(Channels == 2 ? "Stereo" : Channels)}" :
"Unknown";
public int CompareTo(AudioFormat other) => ToEnum().CompareTo(other.ToEnum());
public int CompareTo(object obj) => CompareTo(obj as AudioFormat);
}
}
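For clarity, a hedged sketch of how this enum-backed encoding behaves (member names come from the enum above; output shown with en-US formatting):
using System;
// Parses case-insensitively; unrecognized strings fall back to Unknown.
var format = AudioFormat.FromString("LC_128_44100_stereo");
// The enum value packs bitrate << 18 | sampleRate << 2 | channels,
// so FromEnum recovers Bitrate = 128, SampleRate = 44100, Channels = 2.
Console.WriteLine(format);                                          // 128 Kbps, 44.1 kHz, Stereo
Console.WriteLine(format.IsValid);                                  // True
Console.WriteLine(AudioFormat.FromString("not a format").IsValid);  // False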

View File

@ -1,6 +1,5 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq; using System.Linq;
using Dinah.Core; using Dinah.Core;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@ -35,23 +34,26 @@ namespace DataLayer
// immutable // immutable
public string AudibleProductId { get; private set; } public string AudibleProductId { get; private set; }
public string Title { get; private set; } public string Title { get; private set; }
public string Subtitle { get; private set; }
private string _titleWithSubtitle;
public string TitleWithSubtitle => _titleWithSubtitle ??= string.IsNullOrEmpty(Subtitle) ? Title : $"{Title}: {Subtitle}";
public string Description { get; private set; } public string Description { get; private set; }
public int LengthInMinutes { get; private set; } public int LengthInMinutes { get; private set; }
public ContentType ContentType { get; private set; } public ContentType ContentType { get; private set; }
public string Locale { get; private set; } public string Locale { get; private set; }
internal AudioFormatEnum _audioFormat;
public AudioFormat AudioFormat { get => AudioFormat.FromEnum(_audioFormat); set => _audioFormat = value.ToEnum(); }
// mutable // mutable
public string PictureId { get; set; } public string PictureId { get; set; }
public string PictureLarge { get; set; } public string PictureLarge { get; set; }
// book details // book details
public bool IsAbridged { get; private set; } public bool IsAbridged { get; private set; }
public bool IsSpatial { get; private set; }
public DateTime? DatePublished { get; private set; } public DateTime? DatePublished { get; private set; }
public string Language { get; private set; }
// non-null. use "empty pattern"
internal int CategoryId { get; private set; }
public Category Category { get; private set; }
// is owned, not optional 1:1 // is owned, not optional 1:1
public UserDefinedItem UserDefinedItem { get; private set; } public UserDefinedItem UserDefinedItem { get; private set; }
@ -67,12 +69,12 @@ namespace DataLayer
public Book( public Book(
AudibleProductId audibleProductId, AudibleProductId audibleProductId,
string title, string title,
string subtitle,
string description, string description,
int lengthInMinutes, int lengthInMinutes,
ContentType contentType, ContentType contentType,
IEnumerable<Contributor> authors, IEnumerable<Contributor> authors,
IEnumerable<Contributor> narrators, IEnumerable<Contributor> narrators,
Category category,
string localeName) string localeName)
{ {
// validate // validate
@ -88,13 +90,14 @@ namespace DataLayer
// non-ef-ctor init.s // non-ef-ctor init.s
UserDefinedItem = new UserDefinedItem(this); UserDefinedItem = new UserDefinedItem(this);
ContributorsLink = new HashSet<BookContributor>(); _contributorsLink = new HashSet<BookContributor>();
CategoriesLink = new HashSet<BookCategory>();
_seriesLink = new HashSet<SeriesBook>(); _seriesLink = new HashSet<SeriesBook>();
_supplements = new HashSet<Supplement>(); _supplements = new HashSet<Supplement>();
Category = category;
// simple assigns // simple assigns
UpdateTitle(title, subtitle); Title = title.Trim() ?? "";
Description = description?.Trim() ?? ""; Description = description?.Trim() ?? "";
LengthInMinutes = lengthInMinutes; LengthInMinutes = lengthInMinutes;
ContentType = contentType; ContentType = contentType;
@ -104,22 +107,20 @@ namespace DataLayer
ReplaceNarrators(narrators); ReplaceNarrators(narrators);
} }
public void UpdateTitle(string title, string subtitle)
{
Title = title?.Trim() ?? "";
Subtitle = subtitle?.Trim() ?? "";
_titleWithSubtitle = null;
}
public void UpdateLengthInMinutes(int lengthInMinutes)
=> LengthInMinutes = lengthInMinutes;
#region contributors, authors, narrators #region contributors, authors, narrators
internal HashSet<BookContributor> ContributorsLink { get; private set; }
// use uninitialised backing fields - this means we can detect if the collection was loaded
private HashSet<BookContributor> _contributorsLink;
// i'd like this to be internal but migration throws this exception when i try:
// Value cannot be null.
// Parameter name: property
public IEnumerable<BookContributor> ContributorsLink
=> _contributorsLink?
.OrderBy(bc => bc.Order)
.ToList();
public IEnumerable<Contributor> Authors => ContributorsLink.ByRole(Role.Author).Select(bc => bc.Contributor).ToList(); public IEnumerable<Contributor> Authors => getContributions(Role.Author).Select(bc => bc.Contributor).ToList();
public IEnumerable<Contributor> Narrators => ContributorsLink.ByRole(Role.Narrator).Select(bc => bc.Contributor).ToList(); public IEnumerable<Contributor> Narrators => getContributions(Role.Narrator).Select(bc => bc.Contributor).ToList();
public string Publisher => ContributorsLink.ByRole(Role.Publisher).SingleOrDefault()?.Contributor.Name; public string Publisher => getContributions(Role.Publisher).SingleOrDefault()?.Contributor.Name;
public void ReplaceAuthors(IEnumerable<Contributor> authors, DbContext context = null) public void ReplaceAuthors(IEnumerable<Contributor> authors, DbContext context = null)
=> replaceContributors(authors, Role.Author, context); => replaceContributors(authors, Role.Author, context);
@ -132,19 +133,15 @@ namespace DataLayer
ArgumentValidator.EnsureEnumerableNotNullOrEmpty(newContributors, nameof(newContributors)); ArgumentValidator.EnsureEnumerableNotNullOrEmpty(newContributors, nameof(newContributors));
// the edge cases of doing local-loaded vs remote-only got weird. just load it // the edge cases of doing local-loaded vs remote-only got weird. just load it
if (ContributorsLink is null) if (_contributorsLink is null)
getEntry(context).Collection(s => s.ContributorsLink).Load(); getEntry(context).Collection(s => s.ContributorsLink).Load();
var isIdentical
= ContributorsLink
.ByRole(role)
.Select(c => c.Contributor)
.SequenceEqual(newContributors);
var roleContributions = getContributions(role);
var isIdentical = roleContributions.Select(c => c.Contributor).SequenceEqual(newContributors);
if (isIdentical) if (isIdentical)
return; return;
ContributorsLink.RemoveWhere(bc => bc.Role == role); _contributorsLink.RemoveWhere(bc => bc.Role == role);
addNewContributors(newContributors, role); addNewContributors(newContributors, role);
} }
@ -153,9 +150,14 @@ namespace DataLayer
byte order = 0; byte order = 0;
var newContributionsEnum = newContributors.Select(c => new BookContributor(this, c, role, order++)); var newContributionsEnum = newContributors.Select(c => new BookContributor(this, c, role, order++));
var newContributions = new HashSet<BookContributor>(newContributionsEnum); var newContributions = new HashSet<BookContributor>(newContributionsEnum);
ContributorsLink.UnionWith(newContributions); _contributorsLink.UnionWith(newContributions);
} }
private List<BookContributor> getContributions(Role role)
=> ContributorsLink
.Where(a => a.Role == role)
.OrderBy(a => a.Order)
.ToList();
#endregion #endregion
private Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Book> getEntry(DbContext context) private Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Book> getEntry(DbContext context)
@ -170,30 +172,6 @@ namespace DataLayer
return entry; return entry;
} }
#region categories
internal HashSet<BookCategory> CategoriesLink { get; private set; }
private ReadOnlyCollection<BookCategory> _categoriesReadOnly;
public ReadOnlyCollection<BookCategory> Categories
{
get
{
if (_categoriesReadOnly?.SequenceEqual(CategoriesLink) is not true)
_categoriesReadOnly = CategoriesLink.ToList().AsReadOnly();
return _categoriesReadOnly;
}
}
public void SetCategoryLadders(IEnumerable<CategoryLadder> ladders)
{
ArgumentValidator.EnsureNotNull(ladders, nameof(ladders));
//Replace all existing category ladders.
//Some books may have duplicate ladders
CategoriesLink.Clear();
CategoriesLink.UnionWith(ladders.Distinct().Select(l => new BookCategory(this, l)));
}
#endregion
#region series #region series
private HashSet<SeriesBook> _seriesLink; private HashSet<SeriesBook> _seriesLink;
public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList(); public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList();
@ -237,15 +215,22 @@ namespace DataLayer
public void UpdateProductRating(float overallRating, float performanceRating, float storyRating) public void UpdateProductRating(float overallRating, float performanceRating, float storyRating)
=> Rating.Update(overallRating, performanceRating, storyRating); => Rating.Update(overallRating, performanceRating, storyRating);
public void UpdateBookDetails(bool isAbridged, bool? isSpatial, DateTime? datePublished, string language) public void UpdateBookDetails(bool isAbridged, DateTime? datePublished)
{ {
// don't overwrite with default values // don't overwrite with default values
IsAbridged |= isAbridged; IsAbridged |= isAbridged;
IsSpatial = isSpatial ?? IsSpatial;
DatePublished = datePublished ?? DatePublished; DatePublished = datePublished ?? DatePublished;
Language = language?.FirstCharToUpper() ?? Language;
} }
public override string ToString() => $"[{AudibleProductId}] {TitleWithSubtitle}"; public void UpdateCategory(Category category, DbContext context = null)
{
// since category is never null, nullity means it hasn't been loaded
if (Category is null)
getEntry(context).Reference(s => s.Category).Load();
Category = category;
}
public override string ToString() => $"[{AudibleProductId}] {Title}";
} }
} }

View File

@ -1,20 +0,0 @@
using Dinah.Core;
namespace DataLayer
{
public class BookCategory
{
internal int BookId { get; private set; }
internal int CategoryLadderId { get; private set; }
public Book Book { get; private set; }
public CategoryLadder CategoryLadder { get; private set; }
private BookCategory() { }
internal BookCategory(Book book, CategoryLadder categoriesList)
{
Book = ArgumentValidator.EnsureNotNull(book, nameof(book));
CategoryLadder = ArgumentValidator.EnsureNotNull(categoriesList, nameof(categoriesList));
}
}
}

View File

@ -1,9 +1,9 @@
using System.Collections.Generic; using System;
using System.Collections.ObjectModel; using System.Collections.Generic;
using System.Linq; using System.Linq;
using Dinah.Core; using Dinah.Core;
using Microsoft.EntityFrameworkCore;
#nullable enable
namespace DataLayer namespace DataLayer
{ {
public class AudibleCategoryId public class AudibleCategoryId
@ -15,29 +15,20 @@ namespace DataLayer
Id = id; Id = id;
} }
} }
public class Category public class Category
{ {
// Empty is a special case. use private ctor w/o validation
public static Category GetEmpty() => new() { CategoryId = -1, AudibleCategoryId = "", Name = "" };
internal int CategoryId { get; private set; } internal int CategoryId { get; private set; }
public string? AudibleCategoryId { get; private set; } public string AudibleCategoryId { get; private set; }
public string? Name { get; internal set; } public string Name { get; private set; }
public Category ParentCategory { get; private set; }
internal List<CategoryLadder> _categoryLadders = new();
private ReadOnlyCollection<CategoryLadder>? _categoryLaddersReadOnly;
public ReadOnlyCollection<CategoryLadder> CategoryLadders
{
get
{
if (_categoryLaddersReadOnly?.SequenceEqual(_categoryLadders) is not true)
_categoryLaddersReadOnly = _categoryLadders.AsReadOnly();
return _categoryLaddersReadOnly;
}
}
private Category() { } private Category() { }
/// <summary>special id class b/c it's too easy to get string order mixed up</summary> /// <summary>special id class b/c it's too easy to get string order mixed up</summary>
public Category(AudibleCategoryId audibleSeriesId, string name) public Category(AudibleCategoryId audibleSeriesId, string name, Category parentCategory = null)
{ {
ArgumentValidator.EnsureNotNull(audibleSeriesId, nameof(audibleSeriesId)); ArgumentValidator.EnsureNotNull(audibleSeriesId, nameof(audibleSeriesId));
var id = audibleSeriesId.Id; var id = audibleSeriesId.Id;
@ -46,6 +37,15 @@ namespace DataLayer
AudibleCategoryId = id; AudibleCategoryId = id;
Name = name; Name = name;
UpdateParentCategory(parentCategory);
}
public void UpdateParentCategory(Category parentCategory)
{
// don't overwrite with null but not an error
if (parentCategory is not null)
ParentCategory = parentCategory;
} }
public override string ToString() => $"[{AudibleCategoryId}] {Name}"; public override string ToString() => $"[{AudibleCategoryId}] {Name}";

View File

@ -1,58 +0,0 @@
using Dinah.Core;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
#nullable enable
namespace DataLayer
{
public class CategoryLadder : IEquatable<CategoryLadder>
{
internal int CategoryLadderId { get; private set; }
internal List<Category> _categories;
private ReadOnlyCollection<Category>? _categoriesReadOnly;
public ReadOnlyCollection<Category> Categories
{
get
{
if (_categoriesReadOnly?.SequenceEqual(_categories) is not true)
_categoriesReadOnly = _categories.AsReadOnly();
return _categoriesReadOnly;
}
}
private HashSet<BookCategory>? _booksLink;
public IEnumerable<BookCategory>? BooksLink => _booksLink?.ToList();
private CategoryLadder() { _categories = new(); }
public CategoryLadder(List<Category> categories)
{
ArgumentValidator.EnsureNotNull(categories, nameof(categories));
ArgumentValidator.EnsureGreaterThan(categories.Count, nameof(categories), 0);
_booksLink = new HashSet<BookCategory>();
_categories = categories;
}
public override int GetHashCode()
{
HashCode hashCode = default;
foreach (var category in _categories)
hashCode.Add(category.AudibleCategoryId);
return hashCode.ToHashCode();
}
public bool Equals(CategoryLadder? other)
=> other?._categories is not null
&& Equals(other._categories.Select(c => c.AudibleCategoryId));
public bool Equals(IEnumerable<string?>? categoryIds)
=> categoryIds is not null
&& _categories.Select(c => c.AudibleCategoryId).SequenceEqual(categoryIds);
public override bool Equals(object? obj)
=> obj is CategoryLadder other && Equals(other);
public override string ToString() => string.Join(" > ", _categories.Select(c => c.Name));
}
}

View File

@ -43,7 +43,5 @@ namespace DataLayer
} }
public override string ToString() => Name; public override string ToString() => Name;
public void SetAudibleContributorId(string audibleContributorId)
=> AudibleContributorId = audibleContributorId;
} }
} }

View File

@ -11,9 +11,6 @@ namespace DataLayer
public DateTime DateAdded { get; private set; } public DateTime DateAdded { get; private set; }
public string Account { get; private set; } public string Account { get; private set; }
public bool IsDeleted { get; set; }
public bool AbsentFromLastScan { get; set; }
private LibraryBook() { } private LibraryBook() { }
public LibraryBook(Book book, DateTime dateAdded, string account) public LibraryBook(Book book, DateTime dateAdded, string account)
{ {
@ -25,8 +22,6 @@ namespace DataLayer
Account = account; Account = account;
} }
public void SetAccount(string account) => Account = account;
public override string ToString() => $"{DateAdded:d} {Book}"; public override string ToString() => $"{DateAdded:d} {Book}";
} }
} }

View File

@ -5,7 +5,7 @@ using Dinah.Core;
namespace DataLayer namespace DataLayer
{ {
/// <summary>Parameterless ctor and setters should be used by EF only. Everything else should treat it as immutable</summary> /// <summary>Parameterless ctor and setters should be used by EF only. Everything else should treat it as immutable</summary>
public class Rating : ValueObject_Static<Rating>, IComparable<Rating>, IComparable public class Rating : ValueObject_Static<Rating>
{ {
public float OverallRating { get; private set; } public float OverallRating { get; private set; }
public float PerformanceRating { get; private set; } public float PerformanceRating { get; private set; }
@ -39,15 +39,5 @@ namespace DataLayer
} }
public override string ToString() => $"Overall={OverallRating} Perf={PerformanceRating} Story={StoryRating}"; public override string ToString() => $"Overall={OverallRating} Perf={PerformanceRating} Story={StoryRating}";
public int CompareTo(Rating other)
{
var compare = OverallRating.CompareTo(other.OverallRating);
if (compare != 0) return compare;
compare = PerformanceRating.CompareTo(other.PerformanceRating);
if (compare != 0) return compare;
return StoryRating.CompareTo(other.StoryRating);
}
public int CompareTo(object obj) => obj is Rating second ? CompareTo(second) : -1;
} }
} }
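A brief hedged illustration of the ordering CompareTo establishes (overall rating first, then performance, then story); it assumes the three-argument constructor used elsewhere in DataLayer:
var a = new Rating(4.5f, 4.0f, 3.5f);
var b = new Rating(4.5f, 4.2f, 1.0f);
// Equal overall ratings, so the performance rating breaks the tie: a sorts before b.
System.Console.WriteLine(a.CompareTo(b) < 0);          // True
// A Rating compares as -1 against any non-Rating object.
System.Console.WriteLine(a.CompareTo("not a rating")); // -1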

View File

@@ -20,57 +20,10 @@ namespace DataLayer
 		PartialDownload = 0x1000
 	}
-	public partial class UserDefinedItem
+	public class UserDefinedItem
 	{
 		internal int BookId { get; private set; }
 		public Book Book { get; private set; }
-		/// <summary>
-		/// Date the audio file was last downloaded.
-		/// </summary>
-		public DateTime? LastDownloaded { get; private set; }
-		/// <summary>
-		/// Version of Libation used the last time the audio file was downloaded.
-		/// </summary>
-		public Version LastDownloadedVersion { get; private set; }
-		/// <summary>
-		/// Audio format of the last downloaded audio file.
-		/// </summary>
-		public AudioFormat LastDownloadedFormat { get; private set; }
-		/// <summary>
-		/// Version of the audio file that was last downloaded.
-		/// </summary>
-		public string LastDownloadedFileVersion { get; private set; }
-		public void SetLastDownloaded(Version libationVersion, AudioFormat audioFormat, string audioVersion)
-		{
-			if (LastDownloadedVersion != libationVersion)
-			{
-				LastDownloadedVersion = libationVersion;
-				OnItemChanged(nameof(LastDownloadedVersion));
-			}
-			if (LastDownloadedFormat != audioFormat)
-			{
-				LastDownloadedFormat = audioFormat;
-				OnItemChanged(nameof(LastDownloadedFormat));
-			}
-			if (LastDownloadedFileVersion != audioVersion)
-			{
-				LastDownloadedFileVersion = audioVersion;
-				OnItemChanged(nameof(LastDownloadedFileVersion));
-			}
-			if (libationVersion is null)
-			{
-				LastDownloaded = null;
-				LastDownloadedFormat = null;
-				LastDownloadedFileVersion = null;
-			}
-			else
-			{
-				LastDownloaded = DateTime.Now;
-				OnItemChanged(nameof(LastDownloaded));
-			}
-		}
 		private UserDefinedItem() { }
 		internal UserDefinedItem(Book book)
@@ -98,22 +51,17 @@ namespace DataLayer
 		public IEnumerable<string> TagsEnumerated => Tags == "" ? new string[0] : Tags.Split(null as char[], StringSplitOptions.RemoveEmptyEntries);
 		#region sanitize tags: space delimited. Inline/denormalized. Lower case. Alpha numeric and hyphen
-		/// <summary>
-		/// only legal chars are letters numbers underscores and separating whitespace
-		///
-		/// technically, the only char.s which aren't easily supported are \ [ ]
-		/// however, whitelisting is far safer than blacklisting (eg: new lines, non-printable character)
-		/// it's easy to expand whitelist as needed
-		/// for lucene, ToLower() isn't needed because search is case-inspecific. for here, it prevents duplicates
-		///
-		/// there are also other allowed but misleading characters. eg: the ^ operator defines a 'boost' score
-		/// full list of characters which must be escaped:
-		/// + - && || ! ( ) { } [ ] ^ " ~ * ? : \
-		/// </summary>
-		[GeneratedRegex(@"[^\w\d\s_]")]
-		private static partial Regex IllegalCharacterRegex();
+		// only legal chars are letters numbers underscores and separating whitespace
+		//
+		// technically, the only char.s which aren't easily supported are \ [ ]
+		// however, whitelisting is far safer than blacklisting (eg: new lines, non-printable character)
+		// it's easy to expand whitelist as needed
+		// for lucene, ToLower() isn't needed because search is case-inspecific. for here, it prevents duplicates
+		//
+		// there are also other allowed but misleading characters. eg: the ^ operator defines a 'boost' score
+		// full list of characters which must be escaped:
+		// + - && || ! ( ) { } [ ] ^ " ~ * ? : \
+		static Regex regex { get; } = new Regex(@"[^\w\d\s_]", RegexOptions.Compiled);
 		private static string sanitize(string input)
 		{
 			if (string.IsNullOrWhiteSpace(input))
@@ -125,7 +73,7 @@ namespace DataLayer
 				// assume a hyphen is supposed to be an underscore
 				.Replace("-", "_");
-			var unique = IllegalCharacterRegex()
+			var unique = regex
 				// turn illegal characters into a space. this will also take care of turning new lines into spaces
 				.Replace(str, " ")
 				// split and remove excess spaces
@@ -150,11 +98,7 @@ namespace DataLayer
 		public Rating Rating { get; private set; } = new Rating(0, 0, 0);
 		public void UpdateRating(float overallRating, float performanceRating, float storyRating)
-		{
-			var changed = Rating.OverallRating != overallRating || Rating.PerformanceRating != performanceRating || Rating.StoryRating != storyRating;
-			Rating.Update(overallRating, performanceRating, storyRating);
-			if (changed) OnItemChanged(nameof(Rating));
-		}
+			=> Rating.Update(overallRating, performanceRating, storyRating);
 		#endregion
 		#region LiberatedStatuses
@@ -202,19 +146,10 @@ namespace DataLayer
 				}
 			}
 		}
-		public void SetPdfStatus(LiberatedStatus? pdfStatus)
-		{
-			// don't change whether pdf is actually available. if null, leave as null. if not null, only assign non-null
-			// null => non-null : only when adding a supplement
-			if (pdfStatus.HasValue && PdfStatus.HasValue)
-				PdfStatus = pdfStatus;
-		}
 		public LiberatedStatus? PdfStatus
 		{
 			get => _pdfStatus;
-			internal set
+			set
 			{
 				if (_pdfStatus != value)
 				{
@@ -225,22 +160,6 @@ namespace DataLayer
 			}
 		#endregion
-		#region IsFinished
-		private bool _isFinished;
-		public bool IsFinished
-		{
-			get => _isFinished;
-			set
-			{
-				if (value != _isFinished)
-				{
-					_isFinished = value;
-					OnItemChanged(nameof(IsFinished));
-				}
-			}
-		}
-		#endregion
 		public override string ToString() => $"{Book} {Rating} {Tags}";
 	}
 }
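
The tag-sanitizing comments above describe a whitelist approach: lower-case, treat hyphens as underscores, and turn anything that is not a word character, digit, underscore, or whitespace into a space so Lucene operators and control characters can never leak into a tag. A standalone approximation of that pipeline; SanitizeTags and the sample input are invented for illustration, and the real method differs in detail:

```csharp
using System;
using System.Text.RegularExpressions;

Console.WriteLine(SanitizeTags("Sci-Fi  [unread]\nFAVORITE")); // prints: sci_fi unread favorite

static string SanitizeTags(string? input)
{
    if (string.IsNullOrWhiteSpace(input))
        return "";

    var str = input
        .Trim()
        .ToLowerInvariant()   // lower-casing prevents duplicate tags; Lucene search is case-insensitive anyway
        .Replace("-", "_");   // assume a hyphen was meant to be an underscore

    // Whitelist from the diff: any char that is not word/digit/whitespace/underscore becomes a space.
    var cleaned = Regex.Replace(str, @"[^\w\d\s_]", " ");

    // Space-delimited, denormalized storage: collapse runs of whitespace into single spaces.
    return string.Join(" ", cleaned.Split(null as char[], StringSplitOptions.RemoveEmptyEntries));
}
```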

View File

@@ -8,12 +8,7 @@ namespace DataLayer
 {
 	public static class EntityExtensions
 	{
-		public static IEnumerable<BookContributor> ByRole(this IEnumerable<BookContributor> contributors, Role role)
-			=> contributors
-				.Where(a => a.Role == role)
-				.OrderBy(a => a.Order);
-		public static string TitleSortable(this Book book) => Formatters.GetSortName(book.Title + book.Subtitle);
+		public static string TitleSortable(this Book book) => Formatters.GetSortName(book.Title);
 		public static string AuthorNames(this Book book) => string.Join(", ", book.Authors.Select(a => a.Name));
 		public static string NarratorNames(this Book book) => string.Join(", ", book.Narrators.Select(n => n.Name));
@@ -23,9 +18,9 @@ namespace DataLayer
 		/// <summary>True if exists and IsLiberated. Else false</summary>
 		public static bool PDF_Exists(this Book book) => book.UserDefinedItem.PdfStatus == LiberatedStatus.Liberated;
-		public static string SeriesSortable(this Book book) => Formatters.GetSortName(book.SeriesNames(true));
+		public static string SeriesSortable(this Book book) => Formatters.GetSortName(book.SeriesNames());
 		public static bool HasPdf(this Book book) => book.Supplements.Any();
-		public static string SeriesNames(this Book book, bool includeIndex = false)
+		public static string SeriesNames(this Book book)
 		{
 			if (book.SeriesLink is null)
 				return "";
@@ -33,7 +28,7 @@ namespace DataLayer
 			// first: alphabetical by name
 			var withNames = book.SeriesLink
 				.Where(s => !string.IsNullOrWhiteSpace(s.Series.Name))
-				.Select(getSeriesNameString)
+				.Select(s => s.Series.Name)
 				.OrderBy(a => a)
 				.ToList();
 			// then un-named are alpha by series id
@@ -45,37 +40,15 @@ namespace DataLayer
 			var all = withNames.Union(nullNames).ToList();
 			return string.Join(", ", all);
-			string getSeriesNameString(SeriesBook sb)
-				=> includeIndex && !string.IsNullOrWhiteSpace(sb.Order) && sb.Order != "-1"
-				? $"{sb.Series.Name} (#{sb.Order})"
-				: sb.Series.Name;
 		}
-		public static string[] LowestCategoryNames(this Book book)
-			=> book.CategoriesLink?.Any() is not true ? Array.Empty<string>()
-			: book
-				.CategoriesLink
-				.Select(cl => cl.CategoryLadder.Categories.LastOrDefault()?.Name)
-				.Where(c => c is not null)
-				.Distinct()
-				.ToArray();
-		public static string[] AllCategoryNames(this Book book)
-			=> book.CategoriesLink?.Any() is not true ? Array.Empty<string>()
-			: book
-				.CategoriesLink
-				.SelectMany(cl => cl.CategoryLadder.Categories)
-				.Select(c => c.Name)
-				.ToArray();
-		public static string[] AllCategoryIds(this Book book)
-			=> book.CategoriesLink?.Any() is not true ? null
-			: book
-				.CategoriesLink
-				.SelectMany(cl => cl.CategoryLadder.Categories)
-				.Select(c => c.AudibleCategoryId)
-				.ToArray();
+		public static string[] CategoriesNames(this Book book)
+			=> book.Category is null ? new string[0]
+			: book.Category.ParentCategory is null ? new[] { book.Category.Name }
+			: new[] { book.Category.ParentCategory.Name, book.Category.Name };
+		public static string[] CategoriesIds(this Book book)
+			=> book.Category is null ? null
+			: book.Category.ParentCategory is null ? new[] { book.Category.AudibleCategoryId }
+			: new[] { book.Category.ParentCategory.AudibleCategoryId, book.Category.AudibleCategoryId };
 		public static string AggregateTitles(this IEnumerable<LibraryBook> libraryBooks, int max = 5)
 		{
@@ -84,7 +57,7 @@ namespace DataLayer
 			max = Math.Max(max, 1);
-			var titles = libraryBooks.Select(lb => "- " + lb.Book.TitleWithSubtitle).ToList();
+			var titles = libraryBooks.Select(lb => "- " + lb.Book.Title).ToList();
 			var titlesAgg = titles.Take(max).Aggregate((a, b) => $"{a}\r\n{b}");
 			if (titles.Count == max + 1)
 				titlesAgg += $"\r\n\r\nand 1 other";
@@ -120,9 +93,7 @@ namespace DataLayer
 			var starString = new string(STAR, fullStars);
-			if (score - fullStars >= 0.75f)
-				starString += STAR;
-			else if (score - fullStars >= 0.25f)
+			if (score - fullStars >= 0.25f)
 				starString += HALF;
 			return starString;
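
The last hunk changes star rendering: master promotes a fractional rating of .75 or more to a full star and shows a half star from .25, while v8.5.1 shows a half star for any fraction of .25 or more. A small worked example; the STAR and HALF glyphs and the fullStars truncation are assumptions, since the real constants live elsewhere in the file:

```csharp
using System;

const char STAR = '★';   // placeholder glyph
const char HALF = '½';   // placeholder glyph

float score = 4.8f;
int fullStars = (int)score;                      // assuming fullStars is the truncated score
string master = new string(STAR, fullStars);
string release = new string(STAR, fullStars);

// master: >= .75 rounds up to a full star, otherwise >= .25 shows a half star
if (score - fullStars >= 0.75f) master += STAR;
else if (score - fullStars >= 0.25f) master += HALF;

// v8.5.1: any fraction >= .25 shows a half star
if (score - fullStars >= 0.25f) release += HALF;

Console.WriteLine(master);   // ★★★★★
Console.WriteLine(release);  // ★★★★½
```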

View File

@@ -23,7 +23,6 @@ namespace DataLayer
 		public DbSet<Contributor> Contributors { get; private set; }
 		public DbSet<Series> Series { get; private set; }
 		public DbSet<Category> Categories { get; private set; }
-		public DbSet<CategoryLadder> CategoryLadders { get; private set; }
 		public static LibationContext Create(string connectionString)
 		{
@@ -47,8 +46,15 @@ namespace DataLayer
 			modelBuilder.ApplyConfiguration(new SeriesConfig());
 			modelBuilder.ApplyConfiguration(new SeriesBookConfig());
 			modelBuilder.ApplyConfiguration(new CategoryConfig());
-			modelBuilder.ApplyConfiguration(new CategoryLadderConfig());
-			modelBuilder.ApplyConfiguration(new BookCategoryConfig());
+			// seeds go here. examples in Dinah.EntityFrameworkCore.Tests\DbContextFactoryExample.cs
+			modelBuilder
+				.Entity<Category>()
+				.HasData(Category.GetEmpty());
+			modelBuilder
+				.Entity<Contributor>()
+				.HasData(Contributor.GetEmpty());
 			// views are now supported via "keyless entity types" (instead of "entity types" or the prev "query types"):
 			// https://docs.microsoft.com/en-us/ef/core/modeling/keyless-entity-types

View File

@@ -1,14 +1,11 @@
 using Dinah.EntityFrameworkCore;
 using Microsoft.EntityFrameworkCore;
-using Microsoft.EntityFrameworkCore.Diagnostics;
 namespace DataLayer
 {
 	public class LibationContextFactory : DesignTimeDbContextFactoryBase<LibationContext>
 	{
 		protected override LibationContext CreateNewInstance(DbContextOptions<LibationContext> options) => new LibationContext(options);
-		protected override void UseDatabaseEngine(DbContextOptionsBuilder optionsBuilder, string connectionString)
-			=> optionsBuilder.ConfigureWarnings(w => w.Ignore(RelationalEventId.PendingModelChangesWarning))
-				.UseSqlite(connectionString, ob => ob.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery));
+		protected override void UseDatabaseEngine(DbContextOptionsBuilder optionsBuilder, string connectionString) => optionsBuilder.UseSqlite(connectionString);
 	}
 }
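
On the master side, UseDatabaseEngine also ignores EF Core 9's pending-model-changes warning and opts every query into split-query mode, so each Include loads with its own SQL statement instead of one large JOIN. A rough sketch of building equivalent options outside the Dinah.EntityFrameworkCore base class; ExampleOptions and BuildOptions are invented names for illustration:

```csharp
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;

public static class ExampleOptions
{
    public static DbContextOptions<LibationContext> BuildOptions(string connectionString)
        => new DbContextOptionsBuilder<LibationContext>()
            // suppress the design-time "pending model changes" warning, as the factory above does
            .ConfigureWarnings(w => w.Ignore(RelationalEventId.PendingModelChangesWarning))
            // split queries avoid the row duplication a single JOIN produces for wide Include graphs
            .UseSqlite(connectionString, o => o.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery))
            .Options;
}
```

The same behavior can also be requested per query with AsSplitQuery() if only specific Include-heavy queries need it.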

View File

@@ -1,401 +0,0 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
#nullable disable
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20221214205106_LibraryBookIsDeleted")]
partial class LibraryBookIsDeleted
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder.HasAnnotation("ProductVersion", "7.0.0");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<int>("ContentType")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("PictureLarge")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.Property<long>("_audioFormat")
.HasColumnType("INTEGER");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.Property<bool>("IsDeleted")
.HasColumnType("INTEGER");
b.HasKey("BookId");
b.ToTable("LibraryBooks");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Order")
.HasColumnType("TEXT");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem", (string)null);
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,29 +0,0 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DataLayer.Migrations
{
/// <inheritdoc />
public partial class LibraryBookIsDeleted : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "IsDeleted",
table: "LibraryBooks",
type: "INTEGER",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "IsDeleted",
table: "LibraryBooks");
}
}
}

View File

@@ -1,404 +0,0 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
#nullable disable
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20230201162454_AddBookLanguage")]
partial class AddBookLanguage
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder.HasAnnotation("ProductVersion", "7.0.2");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<int>("ContentType")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<string>("Language")
.HasColumnType("TEXT");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("PictureLarge")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.Property<long>("_audioFormat")
.HasColumnType("INTEGER");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.Property<bool>("IsDeleted")
.HasColumnType("INTEGER");
b.HasKey("BookId");
b.ToTable("LibraryBooks");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Order")
.HasColumnType("TEXT");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem", (string)null);
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -1,28 +0,0 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DataLayer.Migrations
{
/// <inheritdoc />
public partial class AddBookLanguage : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "Language",
table: "Books",
type: "TEXT",
nullable: true);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "Language",
table: "Books");
}
}
}

Some files were not shown because too many files have changed in this diff.