Compare commits

..

No commits in common. "master" and "v4.0.9" have entirely different histories.

1032 changed files with 19038 additions and 87063 deletions

View File

@ -1,5 +0,0 @@
{
"CdmUrls": [
"https://ollj0gz40d.execute-api.us-west-2.amazonaws.com/default/AudibleCdm"
]
}
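For illustration, a minimal C# sketch of reading a config with this shape; the CdmSettings class name and the file name are assumptions, not part of Libation:

using System;
using System.Collections.Generic;
using System.IO;
using Newtonsoft.Json;

// Hypothetical POCO matching the JSON shape above.
public class CdmSettings
{
    public List<string> CdmUrls { get; set; }
}

public static class CdmSettingsExample
{
    public static void Main()
    {
        // The file name is a placeholder for wherever this config ships.
        var settings = JsonConvert.DeserializeObject<CdmSettings>(File.ReadAllText("AudibleCdm.json"));
        Console.WriteLine(settings.CdmUrls[0]); // prints the CDM endpoint URL
    }
}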

View File

@ -1,43 +0,0 @@
---
name: Bug report
about: Create a report to help us improve Libation
title: ''
labels: bug
assignees: ''
---
PLEASE FILL OUT THE FOLLOWING. Bug reports with limited information or without an attached log file may receive limited or delayed help.
___
## Describe the bug
A clear and concise description of what the bug is.
## To Reproduce
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
## Expected behavior
A clear and concise description of what you expected to happen.
## Screenshots
If applicable, add screenshots to help explain your problem.
## Platform
[e.g., Windows 10, Windows 11, macOS, Linux (state your distribution)]
## Log Files
Attach your Libation log file here. If your user folder contains the file "LibationCrash.log", attach that also.
**Default Log File Locations**
|Platform|Folder|
|-|-|
|Windows|`%userprofile%\Libation`|
|macOS|`~/Library/Application Support/Libation`|
|Linux|`~/.local/share/Libation`|
Alternatively, you can open the log folder from within Libation: open Libation's settings and, on the first tab, click the 'Open log folder' button.
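For illustration, a minimal C# sketch of how the default locations in the table above map per platform; it assumes .NET's OperatingSystem helpers and is not Libation's actual code:

using System;
using System.IO;

public static class LogFolderExample
{
    // Illustrative mapping of the default folders in the table above.
    public static string DefaultLibationFolder()
    {
        var home = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
        if (OperatingSystem.IsWindows())
            return Path.Combine(home, "Libation");                                   // %userprofile%\Libation
        if (OperatingSystem.IsMacOS())
            return Path.Combine(home, "Library", "Application Support", "Libation"); // ~/Library/Application Support/Libation
        return Path.Combine(home, ".local", "share", "Libation");                    // ~/.local/share/Libation
    }
}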

View File

@ -1,31 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**No-go ideas**
There are lots of great ideas, and many are beyond what we intend to do for Libation. Some good ideas we do not intend to pursue:
* comprehensive api/cli
* aax/audiobook import
* bulk rename of existing files
* general metadata/tag editor
* playback features
* web gui
* supporting non-audible vendors
* official docker support
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@ -1,8 +0,0 @@
---
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

View File

@ -1,132 +0,0 @@
# build-linux.yml
# Reusable workflow that builds the Linux and macOS (x64 and arm64) versions of Libation.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
runs_on:
type: string
description: "The GitHub hosted runner to use"
required: true
OS:
type: string
description: >
The operating system targeted by the build.
There must be a corresponding Bundle_$OS.sh script file in ./Scripts
required: true
architecture:
type: string
description: "CPU architecture targeted by the build."
required: true
env:
DOTNET_CONFIGURATION: "Release"
DOTNET_VERSION: "9.0.x"
RELEASE_NAME: "chardonnay"
jobs:
build:
name: "${{ inputs.OS }}-${{ inputs.architecture }}"
runs-on: ${{ inputs.runs_on }}
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
env:
NUGET_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get version
id: get_version
run: |
inputVersion="${{ inputs.version_override }}"
if [[ "${#inputVersion}" -gt 0 ]]
then
version="${inputVersion}"
else
version="$(grep -Eio -m 1 '<Version>.*</Version>' ./Source/AppScaffolding/AppScaffolding.csproj | sed -r 's/<\/?Version>//g')"
fi
echo "version=${version}" >> "${GITHUB_OUTPUT}"
- name: Unit test
if: ${{ inputs.run_unit_tests }}
working-directory: ./Source
run: dotnet test
- name: Publish
id: publish
working-directory: ./Source
run: |
if [[ "${{ inputs.OS }}" == "MacOS" ]]
then
display_os="macOS"
RUNTIME_ID="osx-${{ inputs.architecture }}"
else
display_os="Linux"
RUNTIME_ID="linux-${{ inputs.architecture }}"
fi
OUTPUT="bin/Publish/${display_os}-${{ inputs.architecture }}-${{ env.RELEASE_NAME }}"
echo "display_os=${display_os}" >> $GITHUB_OUTPUT
echo "Runtime Identifier: $RUNTIME_ID"
echo "Output Directory: $OUTPUT"
dotnet publish \
LibationAvalonia/LibationAvalonia.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LibationAvalonia/Properties/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
LoadByOS/${display_os}ConfigApp/${display_os}ConfigApp.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LoadByOS/${display_os}ConfigApp/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
LibationCli/LibationCli.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LibationCli/Properties/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
HangoverAvalonia/HangoverAvalonia.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=HangoverAvalonia/Properties/PublishProfiles/${display_os}Profile.pubxml
- name: Build bundle
id: bundle
working-directory: ./Source/bin/Publish/${{ steps.publish.outputs.display_os }}-${{ inputs.architecture }}-${{ env.RELEASE_NAME }}
run: |
BUNDLE_DIR=$(pwd)
echo "Bundle dir: ${BUNDLE_DIR}"
cd ..
SCRIPT=../../../Scripts/Bundle_${{ inputs.OS }}.sh
chmod +rx ${SCRIPT}
${SCRIPT} "${BUNDLE_DIR}" "${{ steps.get_version.outputs.version }}" "${{ inputs.architecture }}"
artifact=$(ls ./bundle)
echo "artifact=${artifact}" >> "${GITHUB_OUTPUT}"
- name: Publish bundle
uses: actions/upload-artifact@v4
with:
name: ${{ steps.bundle.outputs.artifact }}
path: ./Source/bin/Publish/bundle/${{ steps.bundle.outputs.artifact }}
if-no-files-found: error
retention-days: 7

View File

@ -1,118 +0,0 @@
# build-windows.yml
# Reusable workflow that builds the Windows versions of Libation.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
architecture:
type: string
description: "CPU architecture targeted by the build."
required: true
env:
DOTNET_CONFIGURATION: "Release"
DOTNET_VERSION: "9.0.x"
jobs:
build:
name: "${{ matrix.os }}-${{ matrix.release_name }}-${{ inputs.architecture }}"
runs-on: windows-latest
env:
OUTPUT_NAME: "${{ matrix.os }}-${{ matrix.release_name }}-${{ inputs.architecture }}"
RUNTIME_ID: "win-${{ inputs.architecture }}"
strategy:
matrix:
os: [Windows]
ui: [Avalonia]
release_name: [chardonnay]
include:
- os: Windows
ui: WinForms
release_name: classic
prefix: Classic-
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
env:
NUGET_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get version
id: get_version
run: |
if ("${{ inputs.version_override }}".length -gt 0) {
$version = "${{ inputs.version_override }}"
} else {
$version = (Select-Xml -Path "./Source/AppScaffolding/AppScaffolding.csproj" -XPath "/Project/PropertyGroup/Version").Node.InnerXML.Trim()
}
"version=$version" >> $env:GITHUB_OUTPUT
- name: Unit test
if: ${{ inputs.run_unit_tests }}
working-directory: ./Source
run: dotnet test
- name: Publish
working-directory: ./Source
run: |
dotnet publish `
Libation${{ matrix.ui }}/Libation${{ matrix.ui }}.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=Libation${{ matrix.ui }}/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
LoadByOS/${{ matrix.os }}ConfigApp/${{ matrix.os }}ConfigApp.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=LoadByOS/${{ matrix.os }}ConfigApp/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
LibationCli/LibationCli.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:DefineConstants="${{ matrix.release_name }}" `
-p:PublishProfile=LibationCli/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
Hangover${{ matrix.ui }}/Hangover${{ matrix.ui }}.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=Hangover${{ matrix.ui }}/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
- name: Zip artifact
id: zip
working-directory: ./Source/bin/Publish
run: |
$bin_dir = "${{ env.OUTPUT_NAME }}\"
$delfiles = @(
"WindowsConfigApp.exe",
"WindowsConfigApp.runtimeconfig.json",
"WindowsConfigApp.deps.json"
)
foreach ($file in $delfiles){ if (test-path $bin_dir$file){ Remove-Item $bin_dir$file } }
$artifact="${{ matrix.prefix }}Libation.${{ steps.get_version.outputs.version }}-" + "${{ matrix.os }}".ToLower() + "-${{ matrix.release_name }}-${{ inputs.architecture }}"
"artifact=$artifact" >> $env:GITHUB_OUTPUT
Compress-Archive -Path "${bin_dir}*" -DestinationPath "$artifact.zip"
- name: Publish artifact
uses: actions/upload-artifact@v4
with:
name: ${{ steps.zip.outputs.artifact }}.zip
path: ./Source/bin/Publish/${{ steps.zip.outputs.artifact }}.zip
if-no-files-found: error
retention-days: 7

View File

@ -1,53 +0,0 @@
# build.yml
# Reusable workflow that builds Libation for all platforms.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
jobs:
windows:
strategy:
matrix:
architecture: [x64]
uses: ./.github/workflows/build-windows.yml
with:
version_override: ${{ inputs.version_override }}
run_unit_tests: ${{ inputs.run_unit_tests }}
architecture: ${{ matrix.architecture }}
linux:
strategy:
matrix:
OS: [Redhat, Debian]
architecture: [x64, arm64]
uses: ./.github/workflows/build-linux.yml
with:
version_override: ${{ inputs.version_override }}
runs_on: ubuntu-latest
OS: ${{ matrix.OS }}
architecture: ${{ matrix.architecture }}
run_unit_tests: ${{ inputs.run_unit_tests }}
macos:
strategy:
matrix:
architecture: [x64, arm64]
uses: ./.github/workflows/build-linux.yml
with:
version_override: ${{ inputs.version_override }}
runs_on: macos-latest
OS: MacOS
architecture: ${{ matrix.architecture }}
run_unit_tests: ${{ inputs.run_unit_tests }}

View File

@ -1,63 +0,0 @@
# docker.yml
# Reusable workflow that builds a docker image for Libation.
---
name: docker
on:
workflow_call:
inputs:
version:
type: string
description: "Version number"
required: true
release:
type: boolean
description: "Is this a release build?"
required: true
secrets:
docker_username:
required: true
docker_token:
required: true
jobs:
build_and_push:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
if: ${{ inputs.release }}
uses: docker/login-action@v3
with:
username: ${{ secrets.docker_username }}
password: ${{ secrets.docker_token }}
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
latest=true
images: |
name=${{ secrets.docker_username }}/libation
tags: |
type=raw,value=${{ inputs.version }},enable=${{ inputs.release }}
- name: Build and push image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64
push: ${{ steps.metadata.outputs.tags != ''}}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}

View File

@ -1,58 +0,0 @@
# release.yml
# Builds and creates the release on any tag starting with a `v`
---
name: release
on:
push:
tags:
- "v*"
jobs:
prerelease:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.get_version.outputs.version }}
steps:
- name: Get tag version
id: get_version
run: |
export TAG="${{ github.ref_name }}"
echo "version=${TAG#v}" >> "${GITHUB_OUTPUT}"
docker:
needs: [prerelease]
uses: ./.github/workflows/docker.yml
with:
version: ${{ needs.prerelease.outputs.version }}
release: true
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
build:
needs: [prerelease]
uses: ./.github/workflows/build.yml
with:
version_override: ${{ needs.prerelease.outputs.version }}
run_unit_tests: false
release:
needs: [prerelease, build]
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v5
with:
path: artifacts
pattern: "*(Classic-)Libation.*"
- name: Release
id: release
uses: softprops/action-gh-release@v2
with:
name: Libation ${{ needs.prerelease.outputs.version }}
body: <Put a body here>
token: ${{ secrets.GITHUB_TOKEN }}
draft: true
prerelease: false
files: |
artifacts/*/*

View File

@ -1,22 +0,0 @@
name: Validate MetaInfo
"on":
pull_request:
branches: ["master"]
paths:
- .github/workflows/validate-appstream-metainfo.yml
- Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
push:
branches: ["master"]
paths:
- .github/workflows/validate-appstream-metainfo.yml
- Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
jobs:
validate-appstream-metainfo:
runs-on: ubuntu-latest
container:
image: ghcr.io/flathub/flatpak-builder-lint:latest
steps:
- uses: actions/checkout@v5
- name: Check the MetaInfo file
run: flatpak-builder-lint appstream Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml

View File

@ -1,21 +0,0 @@
name: Check desktop file
"on":
pull_request:
branches: ["master"]
paths:
- .github/workflows/validate-desktop-file.yml
- Source/LoadByOS/LinuxConfigApp/Libation.desktop
push:
branches: ["master"]
paths:
- .github/workflows/validate-desktop-file.yml
- Source/LoadByOS/LinuxConfigApp/Libation.desktop
jobs:
validate-desktop-file:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- run: sudo apt --yes install desktop-file-utils
- name: Check the desktop file
run: desktop-file-validate Source/LoadByOS/LinuxConfigApp/Libation.desktop

View File

@ -1,22 +0,0 @@
# validate.yml
# Validates that Libation will build on a pull request or push to master.
---
name: validate
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
build:
uses: ./.github/workflows/build.yml
docker:
uses: ./.github/workflows/docker.yml
with:
version: ${GITHUB_SHA}
release: false
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_token: ${{ secrets.DOCKERHUB_TOKEN }}

.gitignore (vendored, 74 changes)
View File

@ -4,7 +4,6 @@
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
@ -13,9 +12,6 @@
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
@ -23,15 +19,10 @@ mono_crash.*
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
@ -45,10 +36,9 @@ Generated\ Files/
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
# NUNIT
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
@ -62,9 +52,7 @@ BenchmarkDotNet.Artifacts/
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
**/Properties/launchSettings.json
# StyleCop
StyleCopReport.xml
@ -72,7 +60,7 @@ StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*_i.h
*.ilk
*.meta
*.obj
@ -89,7 +77,6 @@ StyleCopReport.xml
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
@ -132,6 +119,9 @@ _ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
@ -142,11 +132,6 @@ _TeamCity*
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
@ -184,7 +169,7 @@ publish/
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
#*.pubxml
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
@ -194,8 +179,6 @@ PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
@ -220,14 +203,12 @@ BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
!*.[Cc]ache/
# Others
ClientBin/
@ -240,7 +221,7 @@ ClientBin/
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
@ -271,9 +252,6 @@ ServiceFabricBackup/
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
@ -309,8 +287,12 @@ paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
@ -335,7 +317,7 @@ __pycache__/
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
@ -344,30 +326,10 @@ ASALocalRun/
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
### manually ignored files
# Windows shortcuts
*.lnk
# manually ignored files
/__TODO.txt
/DataLayer/LibationContext.db
*/bin-Avalonia

View File

@ -1,10 +0,0 @@
{
"WindowsClassic": "Classic-Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-classic-x64\\.zip",
"WindowsAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-chardonnay-x64\\.zip",
"LinuxAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.deb",
"LinuxAvalonia_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.rpm",
"MacOSAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-x64\\.tgz",
"LinuxAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.deb",
"LinuxAvalonia_Arm64_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.rpm",
"MacOSAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-arm64\\.tgz"
}
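These appear to be .NET regular expressions for matching release-artifact filenames. A minimal sketch of testing one of them; the sample filename and its version number are made up:

using System;
using System.Text.RegularExpressions;

public static class ArtifactPatternExample
{
    public static void Main()
    {
        // Pattern copied from the "LinuxAvalonia" entry above, with the JSON string escaping removed.
        var pattern = @"Libation\.\d+\.\d+\.\d+(?:\.\d+)?-linux-chardonnay-amd64\.deb";
        var fileName = "Libation.11.3.1-linux-chardonnay-amd64.deb"; // hypothetical artifact name
        Console.WriteLine(Regex.IsMatch(fileName, pattern));         // True
    }
}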

.vscode/launch.json (vendored, 32 changes)
View File

@ -1,32 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (console) Windows",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
"args": [],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "internalConsole"
},
{
"name": ".NET Core Launch (console) Linux",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build_linux",
"program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
"args": [],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "internalConsole"
}
]
}

.vscode/tasks.json (vendored, 59 changes)
View File

@ -1,59 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"dependsOn": [
"build_libation",
"build_linuxconfigapp"
]
},
{
"label": "build_libation",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
},
{
"label": "build_linuxconfigapp",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LoadByOS/LinuxConfigApp/LinuxConfigApp.csproj"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
},
{
"label": "build_linux",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj",
"-p:TargetFramework=net9.0",
"-p:TargetFrameworks=net9.0",
"-p:RuntimeIdentifier=linux-x64"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}

View File

@ -0,0 +1,123 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<Reference Include="taglib-sharp">
<HintPath>lib\taglib-sharp.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup>
<Folder Include="..\..\..\..\..\..\Dinah%2527s folder\coding\_NET\Visual Studio 2019\Libation\AaxDecrypter\UNTESTED\BytesCrackerLib\" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Dinah.Core\Dinah.Core\Dinah.Core.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="BytesCrackerLib\alglib1.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_0_10000x789935_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_1_10000x791425_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_2_10000x790991_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_3_10000x792120_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_4_10000x790743_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_5_10000x790568_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_6_10000x791458_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_7_10000x791707_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_8_10000x790202_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_9_10000x791022_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\ffmpeg.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\ffprobe.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\rcrack.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\AtomicParsley.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avcodec-57.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avdevice-57.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avfilter-6.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avformat-57.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avutil-55.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygcrypto-1.0.0.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cyggcc_s-1.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygmp4v2-2.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygstdc++-6.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygwin1.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygz.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\ffmpeg.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\ffprobe.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\mp4trackdump.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\postproc-54.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\swresample-2.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\swscale-4.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\taglib-sharp.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

Binary files not shown (23 files).

View File

@ -0,0 +1,362 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Dinah.Core;
using Dinah.Core.Diagnostics;
using Dinah.Core.IO;
using Dinah.Core.StepRunner;
namespace AaxDecrypter
{
public interface ISimpleAaxToM4bConverter
{
event EventHandler<int> DecryptProgressUpdate;
bool Run();
string AppName { get; set; }
string inputFileName { get; }
byte[] coverBytes { get; }
string outDir { get; }
string outputFileName { get; }
Chapters chapters { get; }
Tags tags { get; }
EncodingInfo encodingInfo { get; }
void SetOutputFilename(string outFileName);
}
public interface IAdvancedAaxToM4bConverter : ISimpleAaxToM4bConverter
{
bool Step1_CreateDir();
bool Step2_DecryptAax();
bool Step3_Chapterize();
bool Step4_InsertCoverArt();
bool Step5_Cleanup();
bool Step6_AddTags();
bool End_CreateCue();
bool End_CreateNfo();
}
/// <summary>full c# app. integrated logging. no UI</summary>
public class AaxToM4bConverter : IAdvancedAaxToM4bConverter
{
public event EventHandler<int> DecryptProgressUpdate;
public string inputFileName { get; }
public string decryptKey { get; private set; }
private StepSequence steps { get; }
public byte[] coverBytes { get; private set; }
public string AppName { get; set; } = nameof(AaxToM4bConverter);
public string outDir { get; private set; }
public string outputFileName { get; private set; }
public Chapters chapters { get; private set; }
public Tags tags { get; private set; }
public EncodingInfo encodingInfo { get; private set; }
public static async Task<AaxToM4bConverter> CreateAsync(string inputFile, string decryptKey)
{
var converter = new AaxToM4bConverter(inputFile, decryptKey);
await converter.prelimProcessing();
converter.printPrelim();
return converter;
}
private AaxToM4bConverter(string inputFile, string decryptKey)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(inputFile, nameof(inputFile));
if (!File.Exists(inputFile))
throw new ArgumentNullException(nameof(inputFile), "File does not exist");
steps = new StepSequence
{
Name = "Convert Aax To M4b",
["Step 1: Create Dir"] = Step1_CreateDir,
["Step 2: Decrypt Aax"] = Step2_DecryptAax,
["Step 3: Chapterize and tag"] = Step3_Chapterize,
["Step 4: Insert Cover Art"] = Step4_InsertCoverArt,
["Step 5: Cleanup"] = Step5_Cleanup,
["Step 6: Add Tags"] = Step6_AddTags,
["End: Create Cue"] = End_CreateCue,
["End: Create Nfo"] = End_CreateNfo
};
inputFileName = inputFile;
this.decryptKey = decryptKey;
}
private async Task prelimProcessing()
{
tags = new Tags(inputFileName);
encodingInfo = new EncodingInfo(inputFileName);
chapters = new Chapters(inputFileName, tags.duration.TotalSeconds);
var defaultFilename = Path.Combine(
Path.GetDirectoryName(inputFileName),
getASCIITag(tags.author),
getASCIITag(tags.title) + ".m4b"
);
// set default name
SetOutputFilename(defaultFilename);
await Task.Run(() => saveCover(inputFileName));
}
private string getASCIITag(string property)
{
foreach (char ch in new string(Path.GetInvalidFileNameChars()) + new string(Path.GetInvalidPathChars()))
property = property.Replace(ch.ToString(), "");
return property;
}
private void saveCover(string aaxFile)
{
using var file = TagLib.File.Create(aaxFile, "audio/mp4", TagLib.ReadStyle.Average);
coverBytes = file.Tag.Pictures[0].Data.Data;
}
private void printPrelim()
{
Console.WriteLine("Audible Book ID = " + tags.id);
Console.WriteLine("Book: " + tags.title);
Console.WriteLine("Author: " + tags.author);
Console.WriteLine("Narrator: " + tags.narrator);
Console.WriteLine("Year: " + tags.year);
Console.WriteLine("Total Time: "
+ tags.duration.GetTotalTimeFormatted()
+ " in " + chapters.Count() + " chapters");
Console.WriteLine("WARNING-Source is "
+ encodingInfo.originalBitrate + " kbits @ "
+ encodingInfo.sampleRate + "Hz, "
+ encodingInfo.channels + " channels");
}
public bool Run()
{
var (IsSuccess, Elapsed) = steps.Run();
if (!IsSuccess)
{
Console.WriteLine("WARNING-Conversion failed");
return false;
}
var speedup = (int)(tags.duration.TotalSeconds / (long)Elapsed.TotalSeconds);
Console.WriteLine("Speedup is " + speedup + "x realtime.");
Console.WriteLine("Done");
return true;
}
public void SetOutputFilename(string outFileName)
{
outputFileName = outFileName;
if (Path.GetExtension(outputFileName) != ".m4b")
outputFileName = outputFileWithNewExt(".m4b");
if (File.Exists(outputFileName))
File.Delete(outputFileName);
outDir = Path.GetDirectoryName(outputFileName);
}
private string outputFileWithNewExt(string extension)
=> Path.Combine(outDir, Path.GetFileNameWithoutExtension(outputFileName) + '.' + extension.Trim('.'));
public bool Step1_CreateDir()
{
ProcessRunner.WorkingDir = outDir;
Directory.CreateDirectory(outDir);
return true;
}
public bool Step2_DecryptAax()
{
DecryptProgressUpdate?.Invoke(this, 0);
var tempRipFile = Path.Combine(outDir, "funny.aac");
var fail = "WARNING-Decrypt failure. ";
int returnCode;
if (string.IsNullOrWhiteSpace(decryptKey))
{
returnCode = getKey_decrypt(tempRipFile);
}
else
{
returnCode = decrypt(tempRipFile);
if (returnCode == -99)
{
Console.WriteLine($"{fail}Incorrect decrypt key: {decryptKey}");
decryptKey = null;
returnCode = getKey_decrypt(tempRipFile);
}
}
if (returnCode == 100)
Console.WriteLine($"{fail}Thread completed without changing return code. This shouldn't be possible");
else if (returnCode == 0)
{
// success!
FileExt.SafeMove(tempRipFile, outputFileWithNewExt(".mp4"));
DecryptProgressUpdate?.Invoke(this, 100);
return true;
}
else if (returnCode == -99)
Console.WriteLine($"{fail}Incorrect decrypt key: {decryptKey}");
else // any other returnCode
Console.WriteLine($"{fail}Unknown failure code: {returnCode}");
FileExt.SafeDelete(tempRipFile);
DecryptProgressUpdate?.Invoke(this, 0);
return false;
}
private int getKey_decrypt(string tempRipFile)
{
getKey();
return decrypt(tempRipFile);
}
private void getKey()
{
Console.WriteLine("Discovering decrypt key");
Console.WriteLine("Getting file hash");
var checksum = BytesCracker.GetChecksum(inputFileName);
Console.WriteLine("File hash calculated: " + checksum);
Console.WriteLine("Cracking activation bytes");
var activation_bytes = BytesCracker.GetActivationBytes(checksum);
decryptKey = activation_bytes;
Console.WriteLine("Activation bytes cracked. Decrypt key: " + activation_bytes);
}
private int decrypt(string tempRipFile)
{
FileExt.SafeDelete(tempRipFile);
Console.WriteLine("Decrypting with key " + decryptKey);
var returnCode = 100;
var thread = new Thread(() => returnCode = ngDecrypt());
thread.Start();
double fileLen = new FileInfo(inputFileName).Length;
while (thread.IsAlive && returnCode == 100)
{
Thread.Sleep(500);
if (File.Exists(tempRipFile))
{
double tempLen = new FileInfo(tempRipFile).Length;
var percentProgress = tempLen / fileLen * 100.0;
DecryptProgressUpdate?.Invoke(this, (int)percentProgress);
}
}
return returnCode;
}
private int ngDecrypt()
{
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.mp4trackdumpPath,
Arguments = "-c " + encodingInfo.channels + " -r " + encodingInfo.sampleRate + " \"" + inputFileName + "\""
};
info.EnvironmentVariables["VARIABLE"] = decryptKey;
var result = info.RunHidden();
// bad checksum -- bad decrypt key
if (result.Output.Contains("checksums mismatch, aborting!"))
return -99;
return result.ExitCode;
}
// temp file names for steps 3, 4, 5
string tempChapsGuid { get; } = Guid.NewGuid().ToString().ToUpper().Replace("-", "");
string tempChapsPath => Path.Combine(outDir, $"tempChaps_{tempChapsGuid}.mp4");
string mp4_file => outputFileWithNewExt(".mp4");
string ff_txt_file => mp4_file + ".ff.txt";
public bool Step3_Chapterize()
{
var str1 = "";
if (chapters.FirstChapterStart != 0.0)
{
str1 = " -ss " + chapters.FirstChapterStart.ToString("0.000", CultureInfo.InvariantCulture) + " -t " + (chapters.LastChapterStart - 1.0).ToString("0.000", CultureInfo.InvariantCulture) + " ";
}
var ffmpegTags = tags.GenerateFfmpegTags();
var ffmpegChapters = chapters.GenerateFfmpegChapters();
File.WriteAllText(ff_txt_file, ffmpegTags + ffmpegChapters);
var tagAndChapterInfo = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.ffmpegPath,
Arguments = "-y -i \"" + mp4_file + "\" -f ffmetadata -i \"" + ff_txt_file + "\" -map_metadata 1 -bsf:a aac_adtstoasc -c:a copy" + str1 + " -map 0 \"" + tempChapsPath + "\""
};
tagAndChapterInfo.RunHidden();
return true;
}
public bool Step4_InsertCoverArt()
{
// save cover image as temp file
var coverPath = Path.Combine(outDir, "cover-" + Guid.NewGuid() + ".jpg");
FileExt.CreateFile(coverPath, coverBytes);
var insertCoverArtInfo = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.atomicParsleyPath,
Arguments = "\"" + tempChapsPath + "\" --encodingTool \"" + AppName + "\" --artwork \"" + coverPath + "\" --overWrite"
};
insertCoverArtInfo.RunHidden();
// delete temp file
FileExt.SafeDelete(coverPath);
return true;
}
public bool Step5_Cleanup()
{
FileExt.SafeDelete(mp4_file);
FileExt.SafeDelete(ff_txt_file);
FileExt.SafeMove(tempChapsPath, outputFileName);
return true;
}
public bool Step6_AddTags()
{
tags.AddAppleTags(outputFileName);
return true;
}
public bool End_CreateCue()
{
File.WriteAllText(outputFileWithNewExt(".cue"), chapters.GetCuefromChapters(Path.GetFileName(outputFileName)));
return true;
}
public bool End_CreateNfo()
{
File.WriteAllText(outputFileWithNewExt(".nfo"), NFO.CreateNfoContents(AppName, tags, encodingInfo, chapters));
return true;
}
}
}
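A minimal usage sketch of the converter API above; the paths are placeholders, and passing a null decrypt key makes Step 2 discover the key via BytesCracker:

using System;
using System.Threading.Tasks;
using AaxDecrypter;

public static class ConverterExample
{
    public static async Task Main()
    {
        // Paths are placeholders; a null decrypt key makes Step 2 discover it via BytesCracker.
        var converter = await AaxToM4bConverter.CreateAsync(@"C:\Audible\book.aax", null);
        converter.AppName = "ExampleRipper";
        converter.DecryptProgressUpdate += (_, percent) => Console.WriteLine($"Decrypt: {percent}%");
        converter.SetOutputFilename(@"C:\Audible\out\book.m4b");
        var success = converter.Run(); // runs Steps 1-6 plus the cue and nfo steps
        Console.WriteLine(success ? "Converted." : "Conversion failed.");
    }
}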

View File

@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Dinah.Core;
using Dinah.Core.Diagnostics;
namespace AaxDecrypter
{
public static class BytesCracker
{
public static string GetChecksum(string aaxPath)
{
var info = new ProcessStartInfo
{
FileName = BytesCrackerSupportLibraries.ffprobePath,
Arguments = aaxPath.SurroundWithQuotes(),
WorkingDirectory = Directory.GetCurrentDirectory()
};
// checksum is in the debug info. ffprobe's debug info is written to stderr, not stdout
var ffprobeStderr = info.RunHidden().Error;
// example checksum line:
// ... [aax] file checksum == 0c527840c4f18517157eb0b4f9d6f9317ce60cd1
var checksum = ffprobeStderr.ExtractString("file checksum == ", 40);
return checksum;
}
/// <summary>use checksum to get activation bytes. activation bytes are unique per audible customer. only have to do this 1x/customer</summary>
public static string GetActivationBytes(string checksum)
{
var info = new ProcessStartInfo
{
FileName = BytesCrackerSupportLibraries.rcrackPath,
Arguments = @". -h " + checksum,
WorkingDirectory = Directory.GetCurrentDirectory()
};
var rcrackStdout = info.RunHidden().Output;
// example result
// 0c527840c4f18517157eb0b4f9d6f9317ce60cd1 \xbd\x89X\x09 hex:bd895809
var activation_bytes = rcrackStdout.ExtractString("hex:", 8);
return activation_bytes;
}
}
}
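A minimal usage sketch of the two-step key recovery above; it assumes the ffprobe.exe and rcrack.exe support files in BytesCrackerLib sit next to the executable, and the input path is a placeholder:

using System;
using AaxDecrypter;

public static class BytesCrackerExample
{
    public static void Main()
    {
        var aaxPath = @"C:\Audible\book.aax";                             // placeholder
        var checksum = BytesCracker.GetChecksum(aaxPath);                 // ffprobe reports the file checksum
        var activationBytes = BytesCracker.GetActivationBytes(checksum);  // rcrack recovers the key from the checksum
        Console.WriteLine($"checksum={checksum} activation_bytes={activationBytes}");
    }
}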

View File

@ -0,0 +1,28 @@
using System.IO;
namespace AaxDecrypter
{
public static class BytesCrackerSupportLibraries
{
// GetActivationBytes dependencies
// rcrack.exe
// alglib1.dll
// RainbowCrack files to recover your own Audible activation data (activation_bytes) in an offline manner
// audible_byte#4-4_0_10000x789935_0.rtc
// audible_byte#4-4_1_10000x791425_0.rtc
// audible_byte#4-4_2_10000x790991_0.rtc
// audible_byte#4-4_3_10000x792120_0.rtc
// audible_byte#4-4_4_10000x790743_0.rtc
// audible_byte#4-4_5_10000x790568_0.rtc
// audible_byte#4-4_6_10000x791458_0.rtc
// audible_byte#4-4_7_10000x791707_0.rtc
// audible_byte#4-4_8_10000x790202_0.rtc
// audible_byte#4-4_9_10000x791022_0.rtc
private static string appPath_ { get; } = Path.GetDirectoryName(Dinah.Core.Exe.FileLocationOnDisk);
private static string bytesCrackerLib_ { get; } = Path.Combine(appPath_, "BytesCrackerLib");
public static string ffprobePath { get; } = Path.Combine(bytesCrackerLib_, "ffprobe.exe");
public static string rcrackPath { get; } = Path.Combine(bytesCrackerLib_, "rcrack.exe");
}
}

View File

@ -0,0 +1,95 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Text;
using Dinah.Core.Diagnostics;
namespace AaxDecrypter
{
public class Chapters
{
private List<double> markers { get; }
public double FirstChapterStart => markers[0];
public double LastChapterStart => markers[markers.Count - 1];
public Chapters(string file, double totalTime)
{
markers = getAAXChapters(file);
// add end time
markers.Add(totalTime);
}
private static List<double> getAAXChapters(string file)
{
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.ffprobePath,
Arguments = "-loglevel panic -show_chapters -print_format xml \"" + file + "\""
};
var xml = info.RunHidden().Output;
var xmlDocument = new System.Xml.XmlDocument();
xmlDocument.LoadXml(xml);
var chapters = xmlDocument.SelectNodes("/ffprobe/chapters/chapter")
.Cast<System.Xml.XmlNode>()
.Select(xmlNode => double.Parse(xmlNode.Attributes["start_time"].Value.Replace(",", "."), CultureInfo.InvariantCulture))
.ToList();
return chapters;
}
// subtract 1 b/c end time marker is a real entry but isn't a real chapter
public int Count() => markers.Count - 1;
public string GetCuefromChapters(string fileName)
{
var stringBuilder = new StringBuilder();
if (fileName != "")
{
stringBuilder.Append("FILE \"" + fileName + "\" MP4\n");
}
for (var i = 0; i < Count(); i++)
{
var chapter = i + 1;
var timeSpan = TimeSpan.FromSeconds(markers[i]);
var minutes = Math.Floor(timeSpan.TotalMinutes).ToString();
var seconds = timeSpan.Seconds.ToString("D2");
var milliseconds = (timeSpan.Milliseconds / 10).ToString("D2");
string str = minutes + ":" + seconds + ":" + milliseconds;
stringBuilder.Append("TRACK " + chapter + " AUDIO\n");
stringBuilder.Append(" TITLE \"Chapter " + chapter.ToString("D2") + "\"\n");
stringBuilder.Append(" INDEX 01 " + str + "\n");
}
return stringBuilder.ToString();
}
public string GenerateFfmpegChapters()
{
var stringBuilder = new StringBuilder();
for (var i = 0; i < Count(); i++)
{
var chapter = i + 1;
var start = markers[i] * 1000.0;
var end = markers[i + 1] * 1000.0;
var chapterName = chapter.ToString("D3");
stringBuilder.Append("[CHAPTER]\n");
stringBuilder.Append("TIMEBASE=1/1000\n");
stringBuilder.Append("START=" + start + "\n");
stringBuilder.Append("END=" + end + "\n");
stringBuilder.Append("title=" + chapterName + "\n");
}
return stringBuilder.ToString();
}
}
}
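A minimal usage sketch of the Chapters class above; the input path is a placeholder and ffprobe must be available through DecryptSupportLibraries:

using System;
using AaxDecrypter;

public static class ChaptersExample
{
    public static void Main()
    {
        var inputFile = @"C:\Audible\book.aax";                      // placeholder
        var tags = new Tags(inputFile);                              // duration comes from the file's tags
        var chapters = new Chapters(inputFile, tags.duration.TotalSeconds);
        Console.WriteLine($"{chapters.Count()} chapters");
        var cueSheet = chapters.GetCuefromChapters("book.m4b");      // cue sheet referencing the output file
        var ffmetadata = chapters.GenerateFfmpegChapters();          // [CHAPTER] blocks for ffmpeg's metadata muxer
        Console.WriteLine(cueSheet.Length + ffmetadata.Length);
    }
}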

View File

@ -0,0 +1,21 @@
using System.IO;
namespace AaxDecrypter
{
public static class DecryptSupportLibraries
{
// OTHER EXTERNAL DEPENDENCIES
// ffprobe has these pre-req.s as I'm using it:
// avcodec-57.dll, avdevice-57.dll, avfilter-6.dll, avformat-57.dll, avutil-55.dll, postproc-54.dll, swresample-2.dll, swscale-4.dll, taglib-sharp.dll
//
// something else needs the cygwin files (cyg*.dll)
private static string appPath_ { get; } = Path.GetDirectoryName(Dinah.Core.Exe.FileLocationOnDisk);
private static string decryptLib_ { get; } = Path.Combine(appPath_, "DecryptLib");
public static string ffmpegPath { get; } = Path.Combine(decryptLib_, "ffmpeg.exe");
public static string ffprobePath { get; } = Path.Combine(decryptLib_, "ffprobe.exe");
public static string atomicParsleyPath { get; } = Path.Combine(decryptLib_, "AtomicParsley.exe");
public static string mp4trackdumpPath { get; } = Path.Combine(decryptLib_, "mp4trackdump.exe");
}
}

View File

@ -0,0 +1,41 @@
using System;
using System.Diagnostics;
using Dinah.Core.Diagnostics;
namespace AaxDecrypter
{
public class EncodingInfo
{
public int sampleRate { get; } = 44100;
public int channels { get; } = 2;
public int originalBitrate { get; }
public EncodingInfo(string file)
{
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.ffprobePath,
Arguments = "-loglevel panic -show_streams -print_format flat \"" + file + "\""
};
var end = info.RunHidden().Output;
foreach (string str2 in end.Split('\n'))
{
string[] strArray = str2.Split('=');
switch (strArray[0])
{
case "streams.stream.0.channels":
this.channels = int.Parse(strArray[1].Replace("\"", "").TrimEnd('\r', '\n'));
break;
case "streams.stream.0.sample_rate":
this.sampleRate = int.Parse(strArray[1].Replace("\"", "").TrimEnd('\r', '\n'));
break;
case "streams.stream.0.bit_rate":
string s = strArray[1].Replace("\"", "").TrimEnd('\r', '\n');
this.originalBitrate = (int)Math.Round(double.Parse(s) / 1000.0, MidpointRounding.AwayFromZero);
break;
}
}
}
}
}

View File

@ -0,0 +1,56 @@
namespace AaxDecrypter
{
public static class NFO
{
public static string CreateNfoContents(string ripper, Tags tags, EncodingInfo encodingInfo, Chapters chapters)
{
int _hours = (int)tags.duration.TotalHours;
string myDuration
= (_hours > 0 ? _hours + " hours, " : "")
+ tags.duration.Minutes + " minutes, "
+ tags.duration.Seconds + " seconds";
string str4
= "General Information\r\n"
+ "===================\r\n"
+ " Title: " + tags.title + "\r\n"
+ " Author: " + tags.author + "\r\n"
+ " Read By: " + tags.narrator + "\r\n"
+ " Copyright: " + tags.year + "\r\n"
+ " Audiobook Copyright: " + tags.year + "\r\n";
if (tags.genre != "")
{
str4 = str4 + " Genre: " + tags.genre + "\r\n";
}
string s
= str4
+ " Publisher: " + tags.publisher + "\r\n"
+ " Duration: " + myDuration + "\r\n"
+ " Chapters: " + chapters.Count() + "\r\n"
+ "\r\n"
+ "\r\n"
+ "Media Information\r\n"
+ "=================\r\n"
+ " Source Format: Audible AAX\r\n"
+ " Source Sample Rate: " + encodingInfo.sampleRate + " Hz\r\n"
+ " Source Channels: " + encodingInfo.channels + "\r\n"
+ " Source Bitrate: " + encodingInfo.originalBitrate + " kbits\r\n"
+ "\r\n"
+ " Lossless Encode: Yes\r\n"
+ " Encoded Codec: AAC / M4B\r\n"
+ " Encoded Sample Rate: " + encodingInfo.sampleRate + " Hz\r\n"
+ " Encoded Channels: " + encodingInfo.channels + "\r\n"
+ " Encoded Bitrate: " + encodingInfo.originalBitrate + " kbits\r\n"
+ "\r\n"
+ " Ripper: " + ripper + "\r\n"
+ "\r\n"
+ "\r\n"
+ "Book Description\r\n"
+ "================\r\n"
+ tags.comments;
return s;
}
}
}

View File

@ -0,0 +1,67 @@
using System;
using TagLib;
using TagLib.Mpeg4;
using Dinah.Core;
namespace AaxDecrypter
{
public class Tags
{
public string title { get; }
public string album { get; }
public string author { get; }
public string comments { get; }
public string narrator { get; }
public string year { get; }
public string publisher { get; }
public string id { get; }
public string genre { get; }
public TimeSpan duration { get; }
// input file
public Tags(string file)
{
using var tagLibFile = TagLib.File.Create(file, "audio/mp4", ReadStyle.Average);
title = tagLibFile.Tag.Title.Replace(" (Unabridged)", "");
album = tagLibFile.Tag.Album.Replace(" (Unabridged)", "");
author = tagLibFile.Tag.FirstPerformer ?? "[unknown]";
year = tagLibFile.Tag.Year.ToString();
comments = tagLibFile.Tag.Comment ?? "";
duration = tagLibFile.Properties.Duration;
genre = tagLibFile.Tag.FirstGenre ?? "";
var tag = tagLibFile.GetTag(TagTypes.Apple, true);
publisher = tag.Publisher ?? "";
narrator = string.IsNullOrWhiteSpace(tagLibFile.Tag.FirstComposer) ? tag.Narrator : tagLibFile.Tag.FirstComposer;
comments = !string.IsNullOrWhiteSpace(tag.LongDescription) ? tag.LongDescription : tag.Description;
id = tag.AudibleCDEK;
}
// my best guess of what this step is doing:
// re-publish the data we read from the input file => output file
public void AddAppleTags(string file)
{
using var tagLibFile = TagLib.File.Create(file, "audio/mp4", ReadStyle.Average);
var tag = (AppleTag)tagLibFile.GetTag(TagTypes.Apple, true);
tag.Publisher = publisher;
tag.LongDescription = comments;
tag.Description = comments;
tagLibFile.Save();
}
public string GenerateFfmpegTags()
=> $";FFMETADATA1"
+ $"\nmajor_brand=aax"
+ $"\nminor_version=1"
+ $"\ncompatible_brands=aax M4B mp42isom"
+ $"\ndate={year}"
+ $"\ngenre={genre}"
+ $"\ntitle={title}"
+ $"\nartist={author}"
+ $"\nalbum={album}"
+ $"\ncomposer={narrator}"
+ $"\ncomment={comments.Truncate(254)}"
+ $"\ndescription={comments}"
+ $"\n";
}
}

View File

@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="CsvHelper" Version="15.0.5" />
<PackageReference Include="NPOI" Version="2.5.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\audible api\AudibleApi\AudibleApiDTOs\AudibleApiDTOs.csproj" />
<ProjectReference Include="..\..\audible api\AudibleApi\AudibleApi\AudibleApi.csproj" />
<ProjectReference Include="..\DtoImporterService\DtoImporterService.csproj" />
<ProjectReference Include="..\InternalUtilities\InternalUtilities.csproj" />
<ProjectReference Include="..\LibationSearchEngine\LibationSearchEngine.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,16 @@
using System;
using DataLayer;
using FileManager;
namespace ApplicationServices
{
public static class DbContexts
{
//// idea for future command/query separation
// public static LibationContext GetCommandContext() { }
// public static LibationContext GetQueryContext() { }
public static LibationContext GetContext()
=> LibationContext.Create(SqliteStorage.ConnectionString);
}
}

View File

@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AudibleApi;
using DataLayer;
using DtoImporterService;
using InternalUtilities;
using Serilog;
namespace ApplicationServices
{
public static class LibraryCommands
{
public static async Task<(int totalCount, int newCount)> ImportAccountAsync(Func<Account, ILoginCallback> loginCallbackFactoryFunc, params Account[] accounts)
{
if (accounts is null || accounts.Length == 0)
return (0, 0);
try
{
var importItems = await scanAccountsAsync(loginCallbackFactoryFunc, accounts);
var totalCount = importItems.Count;
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
var newCount = await importIntoDbAsync(importItems);
Log.Logger.Information($"Import: New count {newCount}");
await Task.Run(() => SearchEngineCommands.FullReIndex());
Log.Logger.Information("FullReIndex: success");
return (totalCount, newCount);
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error importing library");
throw;
}
}
private static async Task<List<ImportItem>> scanAccountsAsync(Func<Account, ILoginCallback> loginCallbackFactoryFunc, Account[] accounts)
{
var tasks = new List<Task<List<ImportItem>>>();
foreach (var account in accounts)
{
var callback = loginCallbackFactoryFunc(account);
// get APIs in serial, esp b/c of logins
var api = await AudibleApiActions.GetApiAsync(callback, account);
// add scanAccountAsync as a TASK: do not await
tasks.Add(scanAccountAsync(api, account));
}
// import library in parallel
var arrayOfLists = await Task.WhenAll(tasks);
var importItems = arrayOfLists.SelectMany(a => a).ToList();
return importItems;
}
private static async Task<List<ImportItem>> scanAccountAsync(Api api, Account account)
{
Dinah.Core.ArgumentValidator.EnsureNotNull(account, nameof(account));
var localeName = account.Locale?.Name;
Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new
{
account.AccountName,
account.AccountId,
LocaleName = localeName,
});
var dtoItems = await AudibleApiActions.GetLibraryValidatedAsync(api);
return dtoItems.Select(d => new ImportItem { DtoItem = d, AccountId = account.AccountId, LocaleName = localeName }).ToList();
}
private static async Task<int> importIntoDbAsync(List<ImportItem> importItems)
{
using var context = DbContexts.GetContext();
var libraryImporter = new LibraryImporter(context);
var newCount = await Task.Run(() => libraryImporter.Import(importItems));
context.SaveChanges();
return newCount;
}
public static int UpdateTags(this LibationContext context, Book book, string newTags)
{
try
{
book.UserDefinedItem.Tags = newTags;
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
SearchEngineCommands.UpdateBookTags(book);
return qtyChanges;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error updating tags");
throw;
}
}
}
}

View File

@ -0,0 +1,268 @@
using System;
using System.Collections.Generic;
using System.Linq;
using CsvHelper;
using CsvHelper.Configuration.Attributes;
using DataLayer;
using NPOI.XSSF.UserModel;
using Serilog;
namespace ApplicationServices
{
public class ExportDto
{
public static string GetName(string fieldName)
{
var property = typeof(ExportDto).GetProperty(fieldName);
var attribute = property.GetCustomAttributes(typeof(NameAttribute), true)[0];
var description = (NameAttribute)attribute;
var text = description.Names;
return text[0];
}
[Name("Account")]
public string Account { get; set; }
[Name("Date Added to library")]
public DateTime DateAdded { get; set; }
[Name("Audible Product Id")]
public string AudibleProductId { get; set; }
[Name("Locale")]
public string Locale { get; set; }
[Name("Title")]
public string Title { get; set; }
[Name("Authors")]
public string AuthorNames { get; set; }
[Name("Narrators")]
public string NarratorNames { get; set; }
[Name("Length In Minutes")]
public int LengthInMinutes { get; set; }
[Name("Publisher")]
public string Publisher { get; set; }
[Name("Pdf url")]
public string PdfUrl { get; set; }
[Name("Series Names")]
public string SeriesNames { get; set; }
[Name("Series Order")]
public string SeriesOrder { get; set; }
[Name("Community Rating: Overall")]
public float? CommunityRatingOverall { get; set; }
[Name("Community Rating: Performance")]
public float? CommunityRatingPerformance { get; set; }
[Name("Community Rating: Story")]
public float? CommunityRatingStory { get; set; }
[Name("Cover Id")]
public string PictureId { get; set; }
[Name("Is Abridged?")]
public bool IsAbridged { get; set; }
[Name("Date Published")]
public DateTime? DatePublished { get; set; }
[Name("Categories")]
public string CategoriesNames { get; set; }
[Name("My Rating: Overall")]
public float? MyRatingOverall { get; set; }
[Name("My Rating: Performance")]
public float? MyRatingPerformance { get; set; }
[Name("My Rating: Story")]
public float? MyRatingStory { get; set; }
[Name("My Libation Tags")]
public string MyLibationTags { get; set; }
}
public static class LibToDtos
{
public static List<ExportDto> ToDtos(this IEnumerable<LibraryBook> library)
=> library.Select(a => new ExportDto
{
Account = a.Account,
DateAdded = a.DateAdded,
AudibleProductId = a.Book.AudibleProductId,
Locale = a.Book.Locale,
Title = a.Book.Title,
AuthorNames = a.Book.AuthorNames,
NarratorNames = a.Book.NarratorNames,
LengthInMinutes = a.Book.LengthInMinutes,
Publisher = a.Book.Publisher,
PdfUrl = a.Book.Supplements?.FirstOrDefault()?.Url,
SeriesNames = a.Book.SeriesNames,
SeriesOrder = a.Book.SeriesLink.Any() ? a.Book.SeriesLink?.Select(sl => $"{sl.Index} : {sl.Series.Name}").Aggregate((a, b) => $"{a}, {b}") : "",
CommunityRatingOverall = a.Book.Rating?.OverallRating,
CommunityRatingPerformance = a.Book.Rating?.PerformanceRating,
CommunityRatingStory = a.Book.Rating?.StoryRating,
PictureId = a.Book.PictureId,
IsAbridged = a.Book.IsAbridged,
DatePublished = a.Book.DatePublished,
CategoriesNames = a.Book.CategoriesNames.Any() ? a.Book.CategoriesNames.Aggregate((a, b) => $"{a}, {b}") : "",
MyRatingOverall = a.Book.UserDefinedItem.Rating.OverallRating,
MyRatingPerformance = a.Book.UserDefinedItem.Rating.PerformanceRating,
MyRatingStory = a.Book.UserDefinedItem.Rating.StoryRating,
MyLibationTags = a.Book.UserDefinedItem.Tags
}).ToList();
}
public static class LibraryExporter
{
public static void ToCsv(string saveFilePath)
{
using var context = DbContexts.GetContext();
var dtos = context.GetLibrary_Flat_NoTracking().ToDtos();
if (!dtos.Any())
return;
using var writer = new System.IO.StreamWriter(saveFilePath);
using var csv = new CsvWriter(writer, System.Globalization.CultureInfo.CurrentCulture);
csv.WriteHeader(typeof(ExportDto));
csv.NextRecord();
csv.WriteRecords(dtos);
}
public static void ToJson(string saveFilePath)
{
using var context = DbContexts.GetContext();
var dtos = context.GetLibrary_Flat_NoTracking().ToDtos();
var json = Newtonsoft.Json.JsonConvert.SerializeObject(dtos, Newtonsoft.Json.Formatting.Indented);
System.IO.File.WriteAllText(saveFilePath, json);
}
public static void ToXlsx(string saveFilePath)
{
using var context = DbContexts.GetContext();
var dtos = context.GetLibrary_Flat_NoTracking().ToDtos();
var workbook = new XSSFWorkbook();
var sheet = workbook.CreateSheet("Library");
var detailSubtotalFont = workbook.CreateFont();
detailSubtotalFont.IsBold = true;
var detailSubtotalCellStyle = workbook.CreateCellStyle();
detailSubtotalCellStyle.SetFont(detailSubtotalFont);
// headers
var rowIndex = 0;
var row = sheet.CreateRow(rowIndex);
var columns = new[] {
nameof (ExportDto.Account),
nameof (ExportDto.DateAdded),
nameof (ExportDto.AudibleProductId),
nameof (ExportDto.Locale),
nameof (ExportDto.Title),
nameof (ExportDto.AuthorNames),
nameof (ExportDto.NarratorNames),
nameof (ExportDto.LengthInMinutes),
nameof (ExportDto.Publisher),
nameof (ExportDto.PdfUrl),
nameof (ExportDto.SeriesNames),
nameof (ExportDto.SeriesOrder),
nameof (ExportDto.CommunityRatingOverall),
nameof (ExportDto.CommunityRatingPerformance),
nameof (ExportDto.CommunityRatingStory),
nameof (ExportDto.PictureId),
nameof (ExportDto.IsAbridged),
nameof (ExportDto.DatePublished),
nameof (ExportDto.CategoriesNames),
nameof (ExportDto.MyRatingOverall),
nameof (ExportDto.MyRatingPerformance),
nameof (ExportDto.MyRatingStory),
nameof (ExportDto.MyLibationTags)
};
var col = 0;
foreach (var c in columns)
{
var cell = row.CreateCell(col++);
var name = ExportDto.GetName(c);
cell.SetCellValue(name);
cell.CellStyle = detailSubtotalCellStyle;
}
var dateFormat = workbook.CreateDataFormat();
var dateStyle = workbook.CreateCellStyle();
dateStyle.DataFormat = dateFormat.GetFormat("MM/dd/yyyy HH:mm:ss");
rowIndex++;
// Add data rows
foreach (var dto in dtos)
{
col = 0;
row = sheet.CreateRow(rowIndex);
row.CreateCell(col++).SetCellValue(dto.Account);
var dateAddedCell = row.CreateCell(col++);
dateAddedCell.CellStyle = dateStyle;
dateAddedCell.SetCellValue(dto.DateAdded);
row.CreateCell(col++).SetCellValue(dto.AudibleProductId);
row.CreateCell(col++).SetCellValue(dto.Locale);
row.CreateCell(col++).SetCellValue(dto.Title);
row.CreateCell(col++).SetCellValue(dto.AuthorNames);
row.CreateCell(col++).SetCellValue(dto.NarratorNames);
row.CreateCell(col++).SetCellValue(dto.LengthInMinutes);
row.CreateCell(col++).SetCellValue(dto.Publisher);
row.CreateCell(col++).SetCellValue(dto.PdfUrl);
row.CreateCell(col++).SetCellValue(dto.SeriesNames);
row.CreateCell(col++).SetCellValue(dto.SeriesOrder);
col = createCell(row, col, dto.CommunityRatingOverall);
col = createCell(row, col, dto.CommunityRatingPerformance);
col = createCell(row, col, dto.CommunityRatingStory);
row.CreateCell(col++).SetCellValue(dto.PictureId);
row.CreateCell(col++).SetCellValue(dto.IsAbridged);
var datePubCell = row.CreateCell(col++);
datePubCell.CellStyle = dateStyle;
if (dto.DatePublished.HasValue)
datePubCell.SetCellValue(dto.DatePublished.Value);
else
datePubCell.SetCellValue("");
row.CreateCell(col++).SetCellValue(dto.CategoriesNames);
col = createCell(row, col, dto.MyRatingOverall);
col = createCell(row, col, dto.MyRatingPerformance);
col = createCell(row, col, dto.MyRatingStory);
row.CreateCell(col++).SetCellValue(dto.MyLibationTags);
rowIndex++;
}
using var fileData = new System.IO.FileStream(saveFilePath, System.IO.FileMode.Create);
workbook.Write(fileData);
}
private static int createCell(NPOI.SS.UserModel.IRow row, int col, float? nullableFloat)
{
if (nullableFloat.HasValue)
row.CreateCell(col++).SetCellValue(nullableFloat.Value);
else
row.CreateCell(col++).SetCellValue("");
return col;
}
}
}
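// Illustrative usage of the exporter above (a minimal sketch; the save paths are placeholders, not from this diff):
//   LibraryExporter.ToCsv(@"C:\Export\library.csv");
//   LibraryExporter.ToJson(@"C:\Export\library.json");
//   LibraryExporter.ToXlsx(@"C:\Export\library.xlsx");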

View File

@ -0,0 +1,43 @@
using System.IO;
using DataLayer;
using LibationSearchEngine;
namespace ApplicationServices
{
public static class SearchEngineCommands
{
public static void FullReIndex()
{
var engine = new SearchEngine(DbContexts.GetContext());
engine.CreateNewIndex();
}
public static SearchResultSet Search(string searchString)
{
var engine = new SearchEngine(DbContexts.GetContext());
try
{
return engine.Search(searchString);
}
catch (FileNotFoundException)
{
FullReIndex();
return engine.Search(searchString);
}
}
public static void UpdateBookTags(Book book)
{
var engine = new SearchEngine(DbContexts.GetContext());
try
{
engine.UpdateTags(book.AudibleProductId, book.UserDefinedItem.Tags);
}
catch (FileNotFoundException)
{
FullReIndex();
engine.UpdateTags(book.AudibleProductId, book.UserDefinedItem.Tags);
}
}
}
}
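// Illustrative usage (a sketch; the query string is a placeholder, not from this diff):
//   var results = SearchEngineCommands.Search("dune");
//   SearchEngineCommands.FullReIndex(); // rebuild the search index from the current database contents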

View File

@ -1,38 +1,36 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<TargetFrameworks>netcoreapp3.1;netstandard2.1</TargetFrameworks>
</PropertyGroup>
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
<ApplicationIcon />
<OutputType>Library</OutputType>
<StartupObject />
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Dinah.Core" Version="9.0.3.1" />
<PackageReference Include="Dinah.EntityFrameworkCore" Version="9.0.0.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.8">
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="3.1.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="9.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="9.0.8">
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="3.1.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="3.1.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="3.1.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DebugType>embedded</DebugType>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<DebugType>embedded</DebugType>
</PropertyGroup>
<ItemGroup>
<None Update="migrate.json">
<ProjectReference Include="..\..\Dinah.Core\Dinah.EntityFrameworkCore\Dinah.EntityFrameworkCore.csproj" />
<ProjectReference Include="..\FileManager\FileManager.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="appsettings.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

View File

@ -3,20 +3,18 @@ using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210622205558_RemoveAaxcDecryptionKeys")]
partial class RemoveAaxcDecryptionKeys
partial class LibationContextModelSnapshot : ModelSnapshot
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.5");
.HasAnnotation("ProductVersion", "3.1.7");
modelBuilder.Entity("DataLayer.Book", b =>
{
@ -246,8 +244,6 @@ namespace DataLayer.Migrations
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
@ -286,19 +282,7 @@ namespace DataLayer.Migrations
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
@ -314,10 +298,6 @@ namespace DataLayer.Migrations
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
@ -325,8 +305,6 @@ namespace DataLayer.Migrations
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
@ -336,8 +314,6 @@ namespace DataLayer.Migrations
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
@ -353,27 +329,6 @@ namespace DataLayer.Migrations
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}

View File

@ -0,0 +1,71 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class BookConfig : IEntityTypeConfiguration<Book>
{
public void Configure(EntityTypeBuilder<Book> entity)
{
entity.HasKey(b => b.BookId);
entity.HasIndex(b => b.AudibleProductId);
entity.OwnsOne(b => b.Rating);
//
// CRUCIAL: ignore unmapped collections, even get-only
//
entity.Ignore(nameof(Book.Authors));
entity.Ignore(nameof(Book.Narrators));
//// these don't seem to matter
//entity.Ignore(nameof(Book.AuthorNames));
//entity.Ignore(nameof(Book.NarratorNames));
//entity.Ignore(nameof(Book.HasPdfs));
// OwnsMany: "Can only ever appear on navigation properties of other entity types.
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner."
entity
.OwnsMany(b => b.Supplements, b_s =>
{
b_s.WithOwner(s => s.Book)
.HasForeignKey(s => s.BookId);
b_s.HasKey(s => s.SupplementId);
});
// even though it's owned, we need to map its backing field
entity
.Metadata
.FindNavigation(nameof(Book.Supplements))
.SetPropertyAccessMode(PropertyAccessMode.Field);
// owns it 1:1, store in separate table
entity
.OwnsOne(b => b.UserDefinedItem, b_udi =>
{
b_udi.WithOwner(udi => udi.Book)
.HasForeignKey(udi => udi.BookId);
b_udi.Property(udi => udi.BookId).ValueGeneratedNever();
b_udi.ToTable(nameof(Book.UserDefinedItem));
// owns it 1:1, store in same table
b_udi.OwnsOne(udi => udi.Rating);
});
entity
.Metadata
.FindNavigation(nameof(Book.ContributorsLink))
// PropertyAccessMode.Field : Contributions is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.Metadata
.FindNavigation(nameof(Book.SeriesLink))
// PropertyAccessMode.Field : Series is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.HasOne(b => b.Category)
.WithMany()
.HasForeignKey(b => b.CategoryId);
}
}
}

View File

@ -9,8 +9,8 @@ namespace DataLayer.Configurations
{
entity.HasKey(bc => new { bc.BookId, bc.ContributorId, bc.Role });
entity.HasIndex(bc => bc.BookId);
entity.HasIndex(bc => bc.ContributorId);
entity.HasIndex(b => b.BookId);
entity.HasIndex(b => b.ContributorId);
entity
.HasOne(bc => bc.Book)

View File

@ -9,12 +9,6 @@ namespace DataLayer.Configurations
{
entity.HasKey(c => c.CategoryId);
entity.HasIndex(c => c.AudibleCategoryId);
entity.Ignore(c => c.CategoryLadders);
entity
.HasMany(e => e._categoryLadders)
.WithMany(e => e._categories);
}
}
}

View File

@ -17,9 +17,6 @@ namespace DataLayer.Configurations
.Metadata
.FindNavigation(nameof(Contributor.BooksLink))
.SetPropertyAccessMode(PropertyAccessMode.Field);
// seeds go here. examples in Dinah.EntityFrameworkCore.Tests\DbContextFactoryExample.cs
entity.HasData(Contributor.GetEmpty());
}
}
}

View File

@ -0,0 +1,18 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class LibraryBookConfig : IEntityTypeConfiguration<LibraryBook>
{
public void Configure(EntityTypeBuilder<LibraryBook> entity)
{
entity.HasKey(b => b.BookId);
entity
.HasOne(le => le.Book)
.WithOne()
.HasForeignKey<LibraryBook>(le => le.BookId);
}
}
}

View File

@ -7,10 +7,10 @@ namespace DataLayer.Configurations
{
public void Configure(EntityTypeBuilder<SeriesBook> entity)
{
entity.HasKey(sb => new { sb.SeriesId, sb.BookId });
entity.HasKey(bc => new { bc.SeriesId, bc.BookId });
entity.HasIndex(sb => sb.SeriesId);
entity.HasIndex(sb => sb.BookId);
entity.HasIndex(b => b.SeriesId);
entity.HasIndex(b => b.BookId);
entity
.HasOne(sb => sb.Series)

View File

@ -7,8 +7,8 @@ namespace DataLayer.Configurations
{
public void Configure(EntityTypeBuilder<Series> entity)
{
entity.HasKey(s => s.SeriesId);
entity.HasIndex(s => s.AudibleSeriesId);
entity.HasKey(b => b.SeriesId);
entity.HasIndex(b => b.AudibleSeriesId);
entity
.Metadata

View File

@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
@ -16,17 +15,6 @@ namespace DataLayer
Id = id;
}
}
// enum will be easier than bool to extend later.
public enum ContentType
{
Unknown = 0,
Product = 1,
Episode = 2,
Parent = 4,
}
public class Book
{
// implementation detail. set by db only. only used by data layer
@ -35,23 +23,30 @@ namespace DataLayer
// immutable
public string AudibleProductId { get; private set; }
public string Title { get; private set; }
public string Subtitle { get; private set; }
private string _titleWithSubtitle;
public string TitleWithSubtitle => _titleWithSubtitle ??= string.IsNullOrEmpty(Subtitle) ? Title : $"{Title}: {Subtitle}";
public string Description { get; private set; }
public int LengthInMinutes { get; private set; }
public ContentType ContentType { get; private set; }
// immutable-ish. should be immutable. mutability is necessary for v3 => v4 upgrades
public string Locale { get; private set; }
// mutable
public string PictureId { get; set; }
public string PictureLarge { get; set; }
// book details
public bool IsAbridged { get; private set; }
public bool IsSpatial { get; private set; }
public DateTime? DatePublished { get; private set; }
public string Language { get; private set; }
// non-null. use "empty pattern"
internal int CategoryId { get; private set; }
public Category Category { get; private set; }
public string[] CategoriesNames
=> Category == null ? new string[0]
: Category.ParentCategory == null ? new[] { Category.Name }
: new[] { Category.ParentCategory.Name, Category.Name };
public string[] CategoriesIds
=> Category == null ? null
: Category.ParentCategory == null ? new[] { Category.AudibleCategoryId }
: new[] { Category.ParentCategory.AudibleCategoryId, Category.AudibleCategoryId };
// is owned, not optional 1:1
public UserDefinedItem UserDefinedItem { get; private set; }
@ -67,20 +62,18 @@ namespace DataLayer
public Book(
AudibleProductId audibleProductId,
string title,
string subtitle,
string description,
int lengthInMinutes,
ContentType contentType,
IEnumerable<Contributor> authors,
IEnumerable<Contributor> narrators,
string localeName)
IEnumerable<Contributor> authors,
IEnumerable<Contributor> narrators,
Category category, string localeName)
{
// validate
ArgumentValidator.EnsureNotNull(audibleProductId, nameof(audibleProductId));
var productId = audibleProductId.Id;
ArgumentValidator.EnsureNotNullOrWhiteSpace(productId, nameof(productId));
// assign as soon as possible. stuff below relies on this
// assign as soon as possible. stuff below relies on this
AudibleProductId = productId;
Locale = localeName;
@ -88,38 +81,40 @@ namespace DataLayer
// non-ef-ctor init.s
UserDefinedItem = new UserDefinedItem(this);
ContributorsLink = new HashSet<BookContributor>();
CategoriesLink = new HashSet<BookCategory>();
_contributorsLink = new HashSet<BookContributor>();
_seriesLink = new HashSet<SeriesBook>();
_supplements = new HashSet<Supplement>();
Category = category;
// simple assigns
UpdateTitle(title, subtitle);
Description = description?.Trim() ?? "";
Title = title;
Description = description;
LengthInMinutes = lengthInMinutes;
ContentType = contentType;
// assigns with biz logic
ReplaceAuthors(authors);
ReplaceNarrators(narrators);
}
}
public void UpdateTitle(string title, string subtitle)
{
Title = title?.Trim() ?? "";
Subtitle = subtitle?.Trim() ?? "";
_titleWithSubtitle = null;
}
#region contributors, authors, narrators
// use uninitialised backing fields - this means we can detect if the collection was loaded
private HashSet<BookContributor> _contributorsLink;
// i'd like this to be internal but migration throws this exception when i try:
// Value cannot be null.
// Parameter name: property
public IEnumerable<BookContributor> ContributorsLink
=> _contributorsLink?
.OrderBy(bc => bc.Order)
.ToList();
public void UpdateLengthInMinutes(int lengthInMinutes)
=> LengthInMinutes = lengthInMinutes;
public IEnumerable<Contributor> Authors => getContributions(Role.Author).Select(bc => bc.Contributor).ToList();
public string AuthorNames => string.Join(", ", Authors.Select(a => a.Name));
#region contributors, authors, narrators
internal HashSet<BookContributor> ContributorsLink { get; private set; }
public IEnumerable<Contributor> Narrators => getContributions(Role.Narrator).Select(bc => bc.Contributor).ToList();
public string NarratorNames => string.Join(", ", Narrators.Select(n => n.Name));
public IEnumerable<Contributor> Authors => ContributorsLink.ByRole(Role.Author).Select(bc => bc.Contributor).ToList();
public IEnumerable<Contributor> Narrators => ContributorsLink.ByRole(Role.Narrator).Select(bc => bc.Contributor).ToList();
public string Publisher => ContributorsLink.ByRole(Role.Publisher).SingleOrDefault()?.Contributor.Name;
public string Publisher => getContributions(Role.Publisher).SingleOrDefault()?.Contributor.Name;
public void ReplaceAuthors(IEnumerable<Contributor> authors, DbContext context = null)
=> replaceContributors(authors, Role.Author, context);
@ -132,73 +127,71 @@ namespace DataLayer
ArgumentValidator.EnsureEnumerableNotNullOrEmpty(newContributors, nameof(newContributors));
// the edge cases of doing local-loaded vs remote-only got weird. just load it
if (ContributorsLink is null)
getEntry(context).Collection(s => s.ContributorsLink).Load();
var isIdentical
= ContributorsLink
.ByRole(role)
.Select(c => c.Contributor)
.SequenceEqual(newContributors);
if (_contributorsLink is null)
getEntry(context).Collection(s => s.ContributorsLink).Load();
var roleContributions = getContributions(role);
var isIdentical = roleContributions.Select(c => c.Contributor).SequenceEqual(newContributors);
if (isIdentical)
return;
ContributorsLink.RemoveWhere(bc => bc.Role == role);
_contributorsLink.RemoveWhere(bc => bc.Role == role);
addNewContributors(newContributors, role);
}
private void addNewContributors(IEnumerable<Contributor> newContributors, Role role)
private void addNewContributors(IEnumerable<Contributor> newContributors, Role role)
{
byte order = 0;
var newContributionsEnum = newContributors.Select(c => new BookContributor(this, c, role, order++));
var newContributions = new HashSet<BookContributor>(newContributionsEnum);
ContributorsLink.UnionWith(newContributions);
_contributorsLink.UnionWith(newContributions);
}
private List<BookContributor> getContributions(Role role)
=> ContributorsLink
.Where(a => a.Role == role)
.OrderBy(a => a.Order)
.ToList();
#endregion
private Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Book> getEntry(DbContext context)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
private Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Book> getEntry(DbContext context)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
var entry = context.Entry(this);
var entry = context.Entry(this);
if (!entry.IsKeySet)
throw new InvalidOperationException("Could not load a valid Book from database");
if (!entry.IsKeySet)
throw new InvalidOperationException("Could not load a valid Book from database");
return entry;
}
return entry;
}
#region categories
internal HashSet<BookCategory> CategoriesLink { get; private set; }
private ReadOnlyCollection<BookCategory> _categoriesReadOnly;
public ReadOnlyCollection<BookCategory> Categories
#region series
private HashSet<SeriesBook> _seriesLink;
public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList();
public string SeriesNames
{
get
{
if (_categoriesReadOnly?.SequenceEqual(CategoriesLink) is not true)
_categoriesReadOnly = CategoriesLink.ToList().AsReadOnly();
return _categoriesReadOnly;
// first: alphabetical by name
var withNames = _seriesLink
.Where(s => !string.IsNullOrWhiteSpace(s.Series.Name))
.Select(s => s.Series.Name)
.OrderBy(a => a)
.ToList();
// then un-named are alpha by series id
var nullNames = _seriesLink
.Where(s => string.IsNullOrWhiteSpace(s.Series.Name))
.Select(s => s.Series.AudibleSeriesId)
.OrderBy(a => a)
.ToList();
var all = withNames.Union(nullNames).ToList();
return string.Join(", ", all);
}
}
public void SetCategoryLadders(IEnumerable<CategoryLadder> ladders)
{
ArgumentValidator.EnsureNotNull(ladders, nameof(ladders));
//Replace all existing category ladders.
//Some books may have duplicate ladders
CategoriesLink.Clear();
CategoriesLink.UnionWith(ladders.Distinct().Select(l => new BookCategory(this, l)));
}
#endregion
#region series
private HashSet<SeriesBook> _seriesLink;
public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList();
public void UpsertSeries(Series series, string order, DbContext context = null)
public void UpsertSeries(Series series, float? index = null, DbContext context = null)
{
ArgumentValidator.EnsureNotNull(series, nameof(series));
@ -208,16 +201,17 @@ namespace DataLayer
getEntry(context).Collection(s => s.SeriesLink).Load();
var singleSeriesBook = _seriesLink.SingleOrDefault(sb => sb.Series == series);
if (singleSeriesBook is null)
_seriesLink.Add(new SeriesBook(series, this, order));
if (singleSeriesBook == null)
_seriesLink.Add(new SeriesBook(series, this, index));
else
singleSeriesBook.UpdateOrder(order);
singleSeriesBook.UpdateIndex(index);
}
#endregion
#region supplements
private HashSet<Supplement> _supplements;
public IEnumerable<Supplement> Supplements => _supplements?.ToList();
public bool HasPdf => Supplements.Any();
public void AddSupplementDownloadUrl(string url)
{
@ -226,26 +220,34 @@ namespace DataLayer
ArgumentValidator.EnsureNotNullOrWhiteSpace(url, nameof(url));
if (_supplements.Any(s => url.EqualsInsensitive(url)))
return;
_supplements.Add(new Supplement(this, url));
UserDefinedItem.PdfStatus ??= LiberatedStatus.NotLiberated;
if (!_supplements.Any(s => url.EqualsInsensitive(url)))
_supplements.Add(new Supplement(this, url));
}
#endregion
public void UpdateProductRating(float overallRating, float performanceRating, float storyRating)
=> Rating.Update(overallRating, performanceRating, storyRating);
public void UpdateBookDetails(bool isAbridged, bool? isSpatial, DateTime? datePublished, string language)
public void UpdateBookDetails(bool isAbridged, DateTime? datePublished)
{
// don't overwrite with default values
IsAbridged |= isAbridged;
IsSpatial = isSpatial ?? IsSpatial;
DatePublished = datePublished ?? DatePublished;
Language = language?.FirstCharToUpper() ?? Language;
}
public override string ToString() => $"[{AudibleProductId}] {TitleWithSubtitle}";
public void UpdateCategory(Category category, DbContext context = null)
{
// since category is never null, nullity means it hasn't been loaded
if (Category is null)
getEntry(context).Reference(s => s.Category).Load();
Category = category;
}
// needed for v3 => v4 upgrade
public void UpdateLocale(string localeName)
=> Locale ??= localeName;
public override string ToString() => $"[{AudibleProductId}] {Title}";
}
}

View File

@ -2,8 +2,6 @@
namespace DataLayer
{
public enum Role { Author = 1, Narrator = 2, Publisher = 3 }
public class BookContributor
{
internal int BookId { get; private set; }

View File

@ -1,9 +1,9 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
#nullable enable
namespace DataLayer
{
public class AudibleCategoryId
@ -15,29 +15,20 @@ namespace DataLayer
Id = id;
}
}
public class Category
{
// Empty is a special case. use private ctor w/o validation
public static Category GetEmpty() => new Category { CategoryId = -1, AudibleCategoryId = "", Name = "" };
internal int CategoryId { get; private set; }
public string? AudibleCategoryId { get; private set; }
public string AudibleCategoryId { get; private set; }
public string? Name { get; internal set; }
public string Name { get; private set; }
public Category ParentCategory { get; private set; }
internal List<CategoryLadder> _categoryLadders = new();
private ReadOnlyCollection<CategoryLadder>? _categoryLaddersReadOnly;
public ReadOnlyCollection<CategoryLadder> CategoryLadders
{
get
{
if (_categoryLaddersReadOnly?.SequenceEqual(_categoryLadders) is not true)
_categoryLaddersReadOnly = _categoryLadders.AsReadOnly();
return _categoryLaddersReadOnly;
}
}
private Category() { }
private Category() { }
/// <summary>special id class b/c it's too easy to get string order mixed up</summary>
public Category(AudibleCategoryId audibleSeriesId, string name)
public Category(AudibleCategoryId audibleSeriesId, string name, Category parentCategory = null)
{
ArgumentValidator.EnsureNotNull(audibleSeriesId, nameof(audibleSeriesId));
var id = audibleSeriesId.Id;
@ -46,6 +37,15 @@ namespace DataLayer
AudibleCategoryId = id;
Name = name;
UpdateParentCategory(parentCategory);
}
public void UpdateParentCategory(Category parentCategory)
{
// don't overwrite with null but not an error
if (parentCategory != null)
ParentCategory = parentCategory;
}
public override string ToString() => $"[{AudibleCategoryId}] {Name}";

View File

@ -7,7 +7,7 @@ namespace DataLayer
public class Contributor
{
// Empty is a special case. use private ctor w/o validation
public static Contributor GetEmpty() => new() { ContributorId = -1, Name = "" };
public static Contributor GetEmpty() => new Contributor { ContributorId = -1, Name = "" };
// contributors search links are just name with url-encoding. space can be + or %20
// author search link: /search?searchAuthor=Robert+Bevan
@ -31,19 +31,21 @@ namespace DataLayer
public string AudibleContributorId { get; private set; }
private Contributor() { }
public Contributor(string name, string audibleContributorId = null)
public Contributor(string name)
{
Name = ArgumentValidator.EnsureNotNullOrWhiteSpace(name, nameof(name));
ArgumentValidator.EnsureNotNullOrWhiteSpace(name, nameof(name));
_booksLink = new HashSet<BookContributor>();
Name = name;
}
public Contributor(string name, string audibleContributorId) : this(name)
{
// don't overwrite with null or whitespace but not an error
if (!string.IsNullOrWhiteSpace(audibleContributorId))
AudibleContributorId = audibleContributorId;
}
public override string ToString() => Name;
public void SetAudibleContributorId(string audibleContributorId)
=> AudibleContributorId = audibleContributorId;
}
}

View File

@ -9,10 +9,9 @@ namespace DataLayer
public Book Book { get; private set; }
public DateTime DateAdded { get; private set; }
public string Account { get; private set; }
public bool IsDeleted { get; set; }
public bool AbsentFromLastScan { get; set; }
// immutable-ish. should be immutable. mutability is necessary for v3 => v4 upgrades
public string Account { get; private set; }
private LibraryBook() { }
public LibraryBook(Book book, DateTime dateAdded, string account)
@ -25,8 +24,10 @@ namespace DataLayer
Account = account;
}
public void SetAccount(string account) => Account = account;
// needed for v3 => v4 upgrade
public void UpdateAccount(string account)
=> Account ??= account;
public override string ToString() => $"{DateAdded:d} {Book}";
public override string ToString() => $"{DateAdded:d} {Book}";
}
}

View File

@ -5,14 +5,14 @@ using Dinah.Core;
namespace DataLayer
{
/// <summary>Parameterless ctor and setters should be used by EF only. Everything else should treat it as immutable</summary>
public class Rating : ValueObject_Static<Rating>, IComparable<Rating>, IComparable
public class Rating : ValueObject_Static<Rating>
{
public float OverallRating { get; private set; }
public float PerformanceRating { get; private set; }
public float StoryRating { get; private set; }
private Rating() { }
public Rating(float overallRating, float performanceRating, float storyRating)
internal Rating(float overallRating, float performanceRating, float storyRating)
{
OverallRating = overallRating;
PerformanceRating = performanceRating;
@ -38,16 +38,41 @@ namespace DataLayer
yield return StoryRating;
}
public override string ToString() => $"Overall={OverallRating} Perf={PerformanceRating} Story={StoryRating}";
public float FirstScore
=> OverallRating > 0 ? OverallRating
: PerformanceRating > 0 ? PerformanceRating
: StoryRating;
public int CompareTo(Rating other)
/// <summary>character: ★</summary>
const char STAR = '\u2605';
/// <summary>character: ½</summary>
const char HALF = '\u00BD';
string getStars(float score)
{
var compare = OverallRating.CompareTo(other.OverallRating);
if (compare != 0) return compare;
compare = PerformanceRating.CompareTo(other.PerformanceRating);
if (compare != 0) return compare;
return StoryRating.CompareTo(other.StoryRating);
var fullStars = (int)Math.Floor(score);
var starString = "".PadLeft(fullStars, STAR);
if (score - fullStars == 0.5f)
starString += HALF;
return starString;
}
public int CompareTo(object obj) => obj is Rating second ? CompareTo(second) : -1;
}
public string ToStarString()
{
var items = new List<string>();
if (OverallRating > 0)
items.Add($"Overall: {getStars(OverallRating)}");
if (PerformanceRating > 0)
items.Add($"Perform: {getStars(PerformanceRating)}");
if (StoryRating > 0)
items.Add($"Story: {getStars(StoryRating)}");
return string.Join("\r\n", items);
}
public override string ToString() => $"Overall={OverallRating} Perf={PerformanceRating} Story={StoryRating}";
}
}
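// Illustrative output of the star-string helpers above (a sketch, not from this diff):
// a rating with Overall=4.5, Performance=5, Story=4 renders via ToStarString() as:
//   Overall: ★★★★½
//   Perform: ★★★★★
//   Story: ★★★★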

View File

@ -0,0 +1,4 @@
namespace DataLayer
{
public enum Role { Author = 1, Narrator = 2, Publisher = 3 }
}

View File

@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
@ -47,6 +48,25 @@ namespace DataLayer
Name = name;
}
public void AddBook(Book book, float? index = null, DbContext context = null)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
// our add() is conditional upon what's already included in the collection.
// therefore if not loaded, a trip is required. might as well just load it
if (_booksLink == null)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
if (!context.Entry(this).IsKeySet)
throw new InvalidOperationException("Could not add series");
context.Entry(this).Collection(s => s.BooksLink).Load();
}
if (_booksLink.SingleOrDefault(sb => sb.Book == book) == null)
_booksLink.Add(new SeriesBook(this, book, index));
}
public override string ToString() => Name;
}
}

View File

@ -0,0 +1,40 @@
using Dinah.Core;
namespace DataLayer
{
public class SeriesBook
{
internal int SeriesId { get; private set; }
internal int BookId { get; private set; }
/// <summary>
/// <para>"index" not "order". This is both for sequence and display</para>
/// <para>Float allows for in-between books. eg: 2.5</para>
/// <para>To show 2 editions as the same book in a series, give them the same index</para>
/// <para>null IS NOT the same as 0. Some series call a book "book 0"</para>
/// </summary>
public float? Index { get; private set; }
public Series Series { get; private set; }
public Book Book { get; private set; }
private SeriesBook() { }
internal SeriesBook(Series series, Book book, float? index = null)
{
ArgumentValidator.EnsureNotNull(series, nameof(series));
ArgumentValidator.EnsureNotNull(book, nameof(book));
Series = series;
Book = book;
Index = index;
}
public void UpdateIndex(float? index)
{
if (index.HasValue)
Index = index.Value;
}
public override string ToString() => $"Series={Series} Book={Book}";
}
}

View File

@ -0,0 +1,84 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using Dinah.Core;
namespace DataLayer
{
public class UserDefinedItem
{
internal int BookId { get; private set; }
public Book Book { get; private set; }
private UserDefinedItem() { }
internal UserDefinedItem(Book book)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
Book = book;
// import previously saved tags
ArgumentValidator.EnsureNotNullOrWhiteSpace(book.AudibleProductId, nameof(book.AudibleProductId));
Tags = FileManager.TagsPersistence.GetTags(book.AudibleProductId);
}
private string _tags = "";
public string Tags
{
get => _tags;
set => _tags = sanitize(value);
}
public IEnumerable<string> TagsEnumerated => Tags == "" ? new string[0] : Tags.Split(null as char[], StringSplitOptions.RemoveEmptyEntries);
#region sanitize tags: space delimited. Inline/denormalized. Lower case. Alpha numeric and hyphen
// only legal chars are letters numbers underscores and separating whitespace
//
// technically, the only char.s which aren't easily supported are \ [ ]
// however, whitelisting is far safer than blacklisting (eg: new lines, non-printable characters)
// it's easy to expand the whitelist as needed
// for lucene, ToLower() isn't needed because search is case-insensitive. for here, it prevents duplicates
//
// there are also other allowed but misleading characters. eg: the ^ operator defines a 'boost' score
// full list of characters which must be escaped:
// + - && || ! ( ) { } [ ] ^ " ~ * ? : \
static Regex regex { get; } = new Regex(@"[^\w\d\s_]", RegexOptions.Compiled);
private static string sanitize(string input)
{
if (string.IsNullOrWhiteSpace(input))
return "";
var str = input
.Trim()
.ToLowerInvariant()
// assume a hyphen is supposed to be an underscore
.Replace("-", "_");
var unique = regex
// turn illegal characters into a space. this will also take care of turning new lines into spaces
.Replace(str, " ")
// split and remove excess spaces
.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries)
// de-dup
.Distinct()
// this will prevent order from being relevant
.OrderBy(a => a);
// currently, the string is the canonical set. if we later make the collection into the canonical set:
// var tags = new HashSet<string>(list); // de-dup, order doesn't matter but can seem random due to hashing algo
// var isEqual = tagsNew.SetEquals(tagsOld);
return string.Join(" ", unique);
}
#endregion
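// Illustrative effect of the Tags setter above (a sketch; "udi" is a placeholder UserDefinedItem, not from this diff):
//   udi.Tags = "  Sci-Fi  FAVORITES sci-fi!!";
//   // udi.Tags is now "favorites sci_fi"  (lower-cased, hyphens to underscores, de-duped, alphabetized)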
// owned: not an optional one-to-one
/// <summary>The user's individual book rating</summary>
public Rating Rating { get; private set; } = new Rating(0, 0, 0);
public void UpdateRating(float overallRating, float performanceRating, float storyRating)
=> Rating.Update(overallRating, performanceRating, storyRating);
public override string ToString() => $"{Book} {Rating} {Tags}";
}
}

View File

@ -1,15 +1,16 @@
using DataLayer.Configurations;
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class LibationContext : DbContext
public class LibationContext : InterceptableDbContext
{
// IMPORTANT: USING DbSet<>
// ========================
// these run against the db. linq queries against these MUST be translatable to sql. primitives only. no POCOs or this error occurs:
// Unable to create a constant value of type 'DataLayer.Contributor'. Only primitive types or enumeration types are supported in this context.
// to use full object-linq, load and use Local. HOWEVER, Local is only hashed/indexed on PK. All other searches are very slow
// to use full object-linq, load and use local
// load full table:
// List<Contributor> contributors = ...;
// Contributors.Load();
@ -18,12 +19,11 @@ namespace DataLayer
// // overwrite collection
// Entry(product).Collection(x => x.Narrators).Load();
// product.Narrators = narrators;
public DbSet<LibraryBook> LibraryBooks { get; private set; }
public DbSet<LibraryBook> Library { get; private set; }
public DbSet<Book> Books { get; private set; }
public DbSet<Contributor> Contributors { get; private set; }
public DbSet<Series> Series { get; private set; }
public DbSet<Category> Categories { get; private set; }
public DbSet<CategoryLadder> CategoryLadders { get; private set; }
public static LibationContext Create(string connectionString)
{
@ -35,20 +35,34 @@ namespace DataLayer
// see DesignTimeDbContextFactoryBase for info about ctors and connection strings/OnConfiguring()
internal LibationContext(DbContextOptions options) : base(options) { }
// called on each instantiation
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
AddInterceptor(new TagPersistenceInterceptor());
base.OnConfiguring(optionsBuilder);
}
// typically only called once per execution; NOT once per instantiation
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
modelBuilder.ApplyConfiguration(new BookConfig());
modelBuilder.ApplyConfiguration(new BookConfig());
modelBuilder.ApplyConfiguration(new ContributorConfig());
modelBuilder.ApplyConfiguration(new BookContributorConfig());
modelBuilder.ApplyConfiguration(new LibraryBookConfig());
modelBuilder.ApplyConfiguration(new SeriesConfig());
modelBuilder.ApplyConfiguration(new SeriesBookConfig());
modelBuilder.ApplyConfiguration(new CategoryConfig());
modelBuilder.ApplyConfiguration(new CategoryLadderConfig());
modelBuilder.ApplyConfiguration(new BookCategoryConfig());
// seeds go here. examples in scratch pad
modelBuilder
.Entity<Category>()
.HasData(Category.GetEmpty());
modelBuilder
.Entity<Contributor>()
.HasData(Contributor.GetEmpty());
// views are now supported via "keyless entity types" (instead of "entity types" or the prev "query types"):
// https://docs.microsoft.com/en-us/ef/core/modeling/keyless-entity-types

View File

@ -1,14 +1,14 @@
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;
namespace DataLayer
{
public class LibationContextFactory : DesignTimeDbContextFactoryBase<LibationContext>
{
protected override LibationContext CreateNewInstance(DbContextOptions<LibationContext> options) => new LibationContext(options);
protected override void UseDatabaseEngine(DbContextOptionsBuilder optionsBuilder, string connectionString)
=> optionsBuilder.ConfigureWarnings(w => w.Ignore(RelationalEventId.PendingModelChangesWarning))
.UseSqlite(connectionString, ob => ob.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery));
protected override void UseDatabaseEngine(DbContextOptionsBuilder optionsBuilder, string connectionString) => optionsBuilder
//.UseSqlServer
.UseSqlite
(connectionString);
}
}

View File

@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public static class BookQueries
{
public static Book GetBook_Flat_NoTracking(this LibationContext context, string productId)
=> context
.Books
.AsNoTracking()
.GetBook(productId);
public static Book GetBook(this IQueryable<Book> books, string productId)
=> books
.GetBooks()
.SingleOrDefault(b => b.AudibleProductId == productId);
/// <summary>This is still IQueryable. YOU MUST CALL ToList() YOURSELF</summary>
public static IQueryable<Book> GetBooks(this IQueryable<Book> books, Expression<Func<Book, bool>> predicate)
=> books
.GetBooks()
.Where(predicate);
public static IQueryable<Book> GetBooks(this IQueryable<Book> books)
=> books
// owned items are always loaded. eg: book.UserDefinedItem, book.Supplements
.Include(b => b.SeriesLink).ThenInclude(sb => sb.Series)
.Include(b => b.ContributorsLink).ThenInclude(c => c.Contributor)
.Include(b => b.Category).ThenInclude(c => c.ParentCategory);
}
}
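// Illustrative usage (a minimal sketch; the product id is a placeholder, not from this diff):
//   var book = context.GetBook_Flat_NoTracking("B000000000");
//   var abridged = context.Books.GetBooks(b => b.IsAbridged).ToList();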

View File

@ -0,0 +1,19 @@
using System;
using System.Linq;
namespace DataLayer
{
public static class GenericPaging
{
public static IQueryable<T> Page<T>(this IQueryable<T> query, int pageNumZeroStart, int pageSize)
{
if (pageSize < 1)
throw new ArgumentOutOfRangeException(nameof(pageSize), "pageSize must be at least 1");
if (pageNumZeroStart > 0)
query = query.Skip(pageNumZeroStart * pageSize);
return query.Take(pageSize);
}
}
}
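// Illustrative usage (a sketch): fetch zero-based page 2 (i.e. the third page) of 50 books,
// ordered first so the paging key is stable:
//   var page = context.Books.OrderBy(b => b.Title).Page(pageNumZeroStart: 2, pageSize: 50).ToList();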

View File

@ -0,0 +1,41 @@
using System.Collections.Generic;
using System.Linq;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public static class LibraryQueries
{
public static List<LibraryBook> GetLibrary_Flat_WithTracking(this LibationContext context)
=> context
.Library
.GetLibrary()
.ToList();
public static List<LibraryBook> GetLibrary_Flat_NoTracking(this LibationContext context)
=> context
.Library
.AsNoTracking()
.GetLibrary()
.ToList();
public static LibraryBook GetLibraryBook_Flat_NoTracking(this LibationContext context, string productId)
=> context
.Library
.AsNoTracking()
.GetLibraryBook(productId);
/// <summary>This is still IQueryable. YOU MUST CALL ToList() YOURSELF</summary>
public static IQueryable<LibraryBook> GetLibrary(this IQueryable<LibraryBook> library)
=> library
// owned items are always loaded. eg: book.UserDefinedItem, book.Supplements
.Include(le => le.Book).ThenInclude(b => b.SeriesLink).ThenInclude(sb => sb.Series)
.Include(le => le.Book).ThenInclude(b => b.ContributorsLink).ThenInclude(c => c.Contributor)
.Include(le => le.Book).ThenInclude(b => b.Category).ThenInclude(c => c.ParentCategory);
public static LibraryBook GetLibraryBook(this IQueryable<LibraryBook> library, string productId)
=> library
.GetLibrary()
.SingleOrDefault(le => le.Book.AudibleProductId == productId);
}
}
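// Illustrative usage (a sketch; the product id is a placeholder, not from this diff):
//   var library = context.GetLibrary_Flat_NoTracking();              // full library, detached
//   var one = context.GetLibraryBook_Flat_NoTracking("B000000000");  // single entry by product id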

View File

@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core.Collections.Generic;
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
internal class TagPersistenceInterceptor : IDbInterceptor
{
public void Executed(DbContext context) { }
public void Executing(DbContext context)
{
var tagsCollection
= context
.ChangeTracker
.Entries()
.Where(e => e.State.In(EntityState.Modified, EntityState.Added))
.Select(e => e.Entity as UserDefinedItem)
.Where(udi => udi != null)
// do NOT filter out entries with blank tags. blank is the valid way to show the absence of tags
.Select(t => (t.Book.AudibleProductId, t.Tags))
.ToList();
FileManager.TagsPersistence.Save(tagsCollection);
}
}
}
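// Note (cross-reference, not new behavior): LibationContext.OnConfiguring registers this interceptor
// via AddInterceptor(new TagPersistenceInterceptor()), so pending tag edits appear to be flushed to
// FileManager.TagsPersistence whenever the interceptable context runs its interceptors (e.g. on save).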

View File

@ -0,0 +1,38 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace DataLayer.Utilities
{
public static class LocalDatabaseInfo
{
public static List<string> GetLocalDBInstances()
{
// Start the child process.
using var p = new System.Diagnostics.Process
{
StartInfo = new System.Diagnostics.ProcessStartInfo
{
UseShellExecute = false,
RedirectStandardOutput = true,
FileName = "cmd.exe",
Arguments = "/C sqllocaldb info",
CreateNoWindow = true,
WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden
}
};
p.Start();
var output = p.StandardOutput.ReadToEnd();
p.WaitForExit();
// if LocalDb is not installed then it will return that 'sqllocaldb' is not recognized as an internal or external command, operable program or batch file
return string.IsNullOrWhiteSpace(output) || output.Contains("not recognized")
? new List<string>()
: output
.Split(new string[] { Environment.NewLine }, StringSplitOptions.None)
.Select(i => i.Trim())
.Where(i => !string.IsNullOrEmpty(i))
.ToList();
}
}
}
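// Illustrative usage (a sketch; instance names vary by machine):
//   var instances = LocalDatabaseInfo.GetLocalDBInstances();
//   // e.g. ["MSSQLLocalDB"] when SQL Server Express LocalDB is installed; an empty list otherwise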

View File

@ -0,0 +1,122 @@
using System;
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
namespace _scratch_pad
{
////// to use this as a console, open properties and change from class library => console
//// DON'T FORGET TO REVERT IT
//public class Program
//{
// public static void Main(string[] args)
// {
// var user = new Student() { Name = "Dinah Cheshire" };
// var udi = new UserDef { UserDefId = 1, TagsRaw = "my,tags" };
// using var context = new MyTestContextDesignTimeDbContextFactory().Create();
// context.Add(user);
// //context.Add(udi);
// context.Update(udi);
// context.SaveChanges();
// Console.WriteLine($"Student was saved in the database with id: {user.Id}");
// }
//}
public class MyTestContextDesignTimeDbContextFactory : DesignTimeDbContextFactoryBase<MyTestContext>
{
protected override MyTestContext CreateNewInstance(DbContextOptions<MyTestContext> options) => new MyTestContext(options);
protected override void UseDatabaseEngine(DbContextOptionsBuilder optionsBuilder, string connectionString) => optionsBuilder.UseSqlite(connectionString);
}
public class MyTestContext : DbContext
{
// see DesignTimeDbContextFactoryBase for info about ctors and connection strings/OnConfiguring()
public MyTestContext(DbContextOptions<MyTestContext> options) : base(options) { }
#region classes for OnModelCreating() seed example
class Blog
{
public int BlogId { get; set; }
public string Url { get; set; }
public System.Collections.Generic.ICollection<Post> Posts { get; set; }
}
class Post
{
public int PostId { get; set; }
public string Content { get; set; }
public string Title { get; set; }
public int BlogId { get; set; }
public Blog Blog { get; set; }
public Name AuthorName { get; set; }
}
class Name
{
public string First { get; set; }
public string Last { get; set; }
}
#endregion
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
// config
modelBuilder.Entity<Blog>(entity => entity.Property(e => e.Url).IsRequired());
modelBuilder.Entity<Order>().OwnsOne(p => p.OrderDetails, cb =>
{
cb.OwnsOne(c => c.BillingAddress);
cb.OwnsOne(c => c.ShippingAddress);
});
modelBuilder.Entity<Post>(entity =>
entity
.HasOne(d => d.Blog)
.WithMany(p => p.Posts)
.HasForeignKey("BlogId"));
// BlogSeed
modelBuilder.Entity<Blog>().HasData(new Blog { BlogId = 1, Url = "http://sample.com" });
// PostSeed
modelBuilder.Entity<Post>().HasData(new Post() { BlogId = 1, PostId = 1, Title = "First post", Content = "Test 1" });
// AnonymousPostSeed
modelBuilder.Entity<Post>().HasData(new { BlogId = 1, PostId = 2, Title = "Second post", Content = "Test 2" });
// OwnedTypeSeed
modelBuilder.Entity<Post>().OwnsOne(p => p.AuthorName).HasData(
new { PostId = 1, First = "Andriy", Last = "Svyryd" },
new { PostId = 2, First = "Diego", Last = "Vega" });
}
public DbSet<Student> Students { get; set; }
public DbSet<UserDef> UserDefs { get; set; }
public DbSet<Order> Orders { get; set; }
}
public class Student
{
public int Id { get; set; }
public string Name { get; set; }
}
public class UserDef
{
public int UserDefId { get; set; }
public string TagsRaw { get; set; }
}
public class Order
{
public int Id { get; set; }
public OrderDetails OrderDetails { get; set; }
}
public class OrderDetails
{
public StreetAddress BillingAddress { get; set; }
public StreetAddress ShippingAddress { get; set; }
}
public class StreetAddress
{
public string Street { get; set; }
public string City { get; set; }
}
}

View File

@ -0,0 +1,57 @@
FOR QUICK MIGRATION INSTRUCTIONS:
_DB_NOTES.txt
HOW TO CREATE: EF CORE PROJECT
==============================
example is for sqlite but the same works with MsSql
nuget
Microsoft.EntityFrameworkCore.Tools (needed for using Package Manager Console)
Microsoft.EntityFrameworkCore.Sqlite
MIGRATIONS
require core, not standard
this can be a problem b/c standard and framework can only reference standard, not core
TO USE MIGRATIONS (core and/or standard)
add to csproj
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
</PropertyGroup>
TO USE MIGRATIONS AS *BOTH* CORE AND STANDARD
edit csproj
pluralize this xml tag
from: TargetFramework
to: TargetFrameworks
inside of TargetFrameworks
from: netstandard2.1
to: netcoreapp3.1;netstandard2.1
run. error
SQLite Error 1: 'no such table: Blogs'.
set project "Set as StartUp Project"
Tools >> Nuget Package Manager >> Package Manager Console
default project: Examples\SQLite_NETCore2_0
PM> add-migration InitialCreate
PM> Update-Database
if add-migration xyz throws an error, don't take the error msg at face value. try again with add-migration xyz -verbose
new sqlite .db file created: Copy always/Copy if newer
or copy .db file to destination
relative:
optionsBuilder.UseSqlite("Data Source=blogging.db");
absolute (use fwd slashes):
optionsBuilder.UseSqlite("Data Source=C:/foo/bar/blogging.db");
REFERENCE ARTICLES
------------------
https://docs.microsoft.com/en-us/ef/core/get-started/netcore/new-db-sqlite
https://carlos.mendible.com/2016/07/11/step-by-step-dotnet-core-and-entity-framework-core/
https://www.benday.com/2017/12/19/ef-core-2-0-migrations-without-hard-coded-connection-strings/

View File

@ -0,0 +1,14 @@
{
"ConnectionStrings": {
"LibationContext_sqlserver": "Server=(LocalDb)\\MSSQLLocalDB;Database=DataLayer.LibationContext;Integrated Security=true;",
"// this connection string is ONLY used for DataLayer's Migrations. this appsettings.json file is NOT used at all by application; it is overwritten": "",
"LibationContext": "Data Source=LibationContext.db;Foreign Keys=False;",
"// sqlite notes": "",
"// absolute path example": "Data Source=C:/foo/bar/sample.db",
"// relative path example": "Data Source=sample.db",
"// on windows: sqlite paths accept windows and/or unix slashes": "",
"MyTestContext": "Data Source=%DESKTOP%/sample.db"
}
}

View File

@ -1,3 +0,0 @@
{
"LibationFiles": "/config-internal"
}

View File

@ -1,174 +0,0 @@
#!/bin/bash
error() {
log "ERROR" "$1"
}
warn() {
log "WARNING" "$1"
}
info() {
log "info" "$1"
}
debug() {
if [ "${LOG_LEVEL}" = "debug" ]; then
log "debug" "$1"
fi
}
log() {
LEVEL=$1
MESSAGE=$2
printf "$(date '+%F %T') %s: %s\n" "${LEVEL}" "${MESSAGE}"
}
init_config_file() {
FILE=$1
FULLPATH=${LIBATION_CONFIG_DIR}/${FILE}
if [ -f ${FULLPATH} ]; then
info "loading ${FILE}"
cp ${FULLPATH} ${LIBATION_CONFIG_INTERNAL}/
return 0
else
warn "${FULLPATH} not found, creating empty file"
echo "{}" > ${LIBATION_CONFIG_INTERNAL}/${FILE}
return 1
fi
}
update_settings() {
FILE=$1
KEY=$2
VALUE=$3
info "setting ${KEY} to ${VALUE}"
echo $(jq --arg k "${KEY}" --arg v "${VALUE}" '.[$k] = $v' ${LIBATION_CONFIG_INTERNAL}/${FILE}) > ${LIBATION_CONFIG_INTERNAL}/${FILE}.tmp
mv ${LIBATION_CONFIG_INTERNAL}/${FILE}.tmp ${LIBATION_CONFIG_INTERNAL}/${FILE}
}
is_mounted() {
DIR=$1
if grep -qs "${DIR} " /proc/mounts;
then
return 0
else
return 1
fi
}
create_db() {
DBFILE=$1
if [ -f "${DBFILE}" ]; then
warn "prexisting database found when creating"
return 0
else
if ! touch "${DBFILE}"; then
error "unable to create database, check permissions on host"
exit 1
fi
return 1
fi
}
setup_db() {
DBPATH=$1
dbpattern="*.db"
debug "using database directory ${DBPATH}"
# Figure out the right database file
if [[ -z "${LIBATION_DB_FILE}" ]];
then
dbCount=$(find "${DBPATH}" -maxdepth 1 -type f -name "${dbpattern}" | wc -l)
if [ "${dbCount}" -gt 1 ];
then
error "too many database files found, set LIBATION_DB_FILE to the filename you wish to use"
exit 1
elif [ "${dbCount}" -eq 1 ];
then
files=( ${DBPATH}/${dbpattern} )
FILE=${files[0]}
else
FILE="${DBPATH}/LibationContext.db"
fi
else
FILE="${DBPATH}/${LIBATION_DB_FILE}"
fi
debug "planning to use database ${FILE}"
if [ -f "${FILE}" ]; then
info "database found at ${FILE}"
elif [ ${LIBATION_CREATE_DB} = "true" ];
then
warn "database not found, creating one at ${FILE}"
create_db ${FILE}
else
error "database not found and creation is disabled"
exit 1
fi
ln -s "${FILE}" "${LIBATION_CONFIG_INTERNAL}/LibationContext.db"
}
run() {
info "scanning accounts"
/libation/LibationCli scan
info "liberating books"
/libation/LibationCli liberate
}
main() {
info "initializing libation"
init_config_file AccountsSettings.json
init_config_file Settings.json
info "loading settings"
update_settings Settings.json Books "${LIBATION_BOOKS_DIR:-/data}"
update_settings Settings.json InProgress /tmp
info "loading database"
# If user provides a separate database mount, use that
if is_mounted "${LIBATION_DB_DIR}";
then
DB_LOCATION=${LIBATION_DB_DIR}
# Otherwise, use the config directory
else
DB_LOCATION=${LIBATION_CONFIG_DIR}
fi
setup_db ${DB_LOCATION}
# Try to warn if books dir wasn't mounted in
if ! is_mounted "${LIBATION_BOOKS_DIR}";
then
warn "${LIBATION_BOOKS_DIR} does not appear to be mounted, books will not be saved"
fi
# Let the user know what the run type will be
if [[ -z "${SLEEP_TIME}" ]]; then
SLEEP_TIME=-1
fi
if [ "${SLEEP_TIME}" == -1 ]; then
info "running once"
else
info "running every ${SLEEP_TIME}"
fi
# loop
while true
do
run
# Liberate only once if SLEEP_TIME was set to -1
if [ "${SLEEP_TIME}" == -1 ]; then
break
fi
sleep "${SLEEP_TIME}"
done
info "exiting"
}
main

View File

@ -1,39 +0,0 @@
# Dockerfile
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/dotnet/sdk:9.0 AS build
ARG TARGETARCH
COPY Source /Source
RUN dotnet publish \
/Source/LibationCli/LibationCli.csproj \
--arch ${TARGETARCH} \
--configuration Release \
--output /Source/bin/Publish/Linux-chardonnay \
-p:PublishProfile=/Source/LibationCli/Properties/PublishProfiles/LinuxProfile.pubxml
FROM mcr.microsoft.com/dotnet/runtime:9.0
ARG USER_UID=1001
ARG USER_GID=1001
# Set the character set that will be used for folder and filenames when liberating
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8
ENV SLEEP_TIME=-1
ENV LIBATION_CONFIG_INTERNAL=/config-internal
ENV LIBATION_CONFIG_DIR=/config
ENV LIBATION_DB_DIR=/db
ENV LIBATION_DB_FILE=
ENV LIBATION_CREATE_DB=true
ENV LIBATION_BOOKS_DIR=/data
RUN apt-get update && apt-get -y upgrade && \
apt-get install -y jq && \
mkdir -m777 ${LIBATION_CONFIG_INTERNAL} ${LIBATION_BOOKS_DIR}
COPY --from=build /Source/bin/Publish/Linux-chardonnay /libation
COPY Docker/* /libation
USER ${USER_UID}:${USER_GID}
CMD ["/libation/liberate.sh"]

Some files were not shown because too many files have changed in this diff.