Compare commits


No commits in common. "master" and "v3.0" have entirely different histories.
master ... v3.0

1065 changed files with 22980 additions and 91223 deletions


@ -1,5 +0,0 @@
{
"CdmUrls": [
"https://ollj0gz40d.execute-api.us-west-2.amazonaws.com/default/AudibleCdm"
]
}


@ -1,43 +0,0 @@
---
name: Bug report
about: Create a report to help us improve Libation
title: ''
labels: bug
assignees: ''
---
PLEASE FILL OUT THE FOLLOWING. Bug reports with limited information or lacking an attached log file may get limited or delayed help.
___
## Describe the bug
A clear and concise description of what the bug is.
## To Reproduce
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
## Expected behavior
A clear and concise description of what you expected to happen.
## Screenshots
If applicable, add screenshots to help explain your problem.
## Platform
[e.g. Windows 10, Windows 11, Mac, Linux (State distribution)]
## Log Files
Attach your Libation log file here. If your user folder contains the file "LibationCrash.log", attach that also.
**Default Log File Locations**
|Platform|Folder|
|-|-|
|Windows|`%userprofile%\Libation`|
|macOS|`~/Library/Application Support/Libation`|
|Linux|`~/.local/share/Libation`|
Alternatively, you may open the log folder from within Libation: open Libation's settings and, on the first tab, click the 'Open log folder' button.


@ -1,31 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**No-go ideas**
There are lots of great ideas and many are beyond what we intend to do for Libation. Some good ideas which we do not intend to pursue:
* comprehensive api/cli
* aax/audiobook import
* bulk rename of existing files
* general metadata/tag editor
* playback features
* web gui
* supporting non-audible vendors
* official docker support
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.


@ -1,8 +0,0 @@
---
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@ -1,132 +0,0 @@
# build-linux.yml
# Reusable workflow that builds the Linux and macOS (x64 and arm64) versions of Libation.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
runs_on:
type: string
description: "The GitHub hosted runner to use"
required: true
OS:
type: string
description: >
The operating system targeted by the build.
There must be a corresponding Bundle_$OS.sh script file in ./Scripts
required: true
architecture:
type: string
description: "CPU architecture targeted by the build."
required: true
env:
DOTNET_CONFIGURATION: "Release"
DOTNET_VERSION: "9.0.x"
RELEASE_NAME: "chardonnay"
jobs:
build:
name: "${{ inputs.OS }}-${{ inputs.architecture }}"
runs-on: ${{ inputs.runs_on }}
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
env:
NUGET_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get version
id: get_version
run: |
inputVersion="${{ inputs.version_override }}"
if [[ "${#inputVersion}" -gt 0 ]]
then
version="${inputVersion}"
else
version="$(grep -Eio -m 1 '<Version>.*</Version>' ./Source/AppScaffolding/AppScaffolding.csproj | sed -r 's/<\/?Version>//g')"
fi
echo "version=${version}" >> "${GITHUB_OUTPUT}"
- name: Unit test
if: ${{ inputs.run_unit_tests }}
working-directory: ./Source
run: dotnet test
- name: Publish
id: publish
working-directory: ./Source
run: |
if [[ "${{ inputs.OS }}" == "MacOS" ]]
then
display_os="macOS"
RUNTIME_ID="osx-${{ inputs.architecture }}"
else
display_os="Linux"
RUNTIME_ID="linux-${{ inputs.architecture }}"
fi
OUTPUT="bin/Publish/${display_os}-${{ inputs.architecture }}-${{ env.RELEASE_NAME }}"
echo "display_os=${display_os}" >> $GITHUB_OUTPUT
echo "Runtime Identifier: $RUNTIME_ID"
echo "Output Directory: $OUTPUT"
dotnet publish \
LibationAvalonia/LibationAvalonia.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LibationAvalonia/Properties/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
LoadByOS/${display_os}ConfigApp/${display_os}ConfigApp.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LoadByOS/Properties/${display_os}ConfigApp/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
LibationCli/LibationCli.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=LibationCli/Properties/PublishProfiles/${display_os}Profile.pubxml
dotnet publish \
HangoverAvalonia/HangoverAvalonia.csproj \
--runtime $RUNTIME_ID \
--configuration ${{ env.DOTNET_CONFIGURATION }} \
--output $OUTPUT \
-p:PublishProfile=HangoverAvalonia/Properties/PublishProfiles/${display_os}Profile.pubxml
- name: Build bundle
id: bundle
working-directory: ./Source/bin/Publish/${{ steps.publish.outputs.display_os }}-${{ inputs.architecture }}-${{ env.RELEASE_NAME }}
run: |
BUNDLE_DIR=$(pwd)
echo "Bundle dir: ${BUNDLE_DIR}"
cd ..
SCRIPT=../../../Scripts/Bundle_${{ inputs.OS }}.sh
chmod +rx ${SCRIPT}
${SCRIPT} "${BUNDLE_DIR}" "${{ steps.get_version.outputs.version }}" "${{ inputs.architecture }}"
artifact=$(ls ./bundle)
echo "artifact=${artifact}" >> "${GITHUB_OUTPUT}"
- name: Publish bundle
uses: actions/upload-artifact@v4
with:
name: ${{ steps.bundle.outputs.artifact }}
path: ./Source/bin/Publish/bundle/${{ steps.bundle.outputs.artifact }}
if-no-files-found: error
retention-days: 7


@ -1,118 +0,0 @@
# build-windows.yml
# Reusable workflow that builds the Windows versions of Libation.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
architecture:
type: string
description: "CPU architecture targeted by the build."
required: true
env:
DOTNET_CONFIGURATION: "Release"
DOTNET_VERSION: "9.0.x"
jobs:
build:
name: "${{ matrix.os }}-${{ matrix.release_name }}-${{ inputs.architecture }}"
runs-on: windows-latest
env:
OUTPUT_NAME: "${{ matrix.os }}-${{ matrix.release_name }}-${{ inputs.architecture }}"
RUNTIME_ID: "win-${{ inputs.architecture }}"
strategy:
matrix:
os: [Windows]
ui: [Avalonia]
release_name: [chardonnay]
include:
- os: Windows
ui: WinForms
release_name: classic
prefix: Classic-
steps:
- uses: actions/checkout@v5
- name: Setup .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
env:
NUGET_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get version
id: get_version
run: |
if ("${{ inputs.version_override }}".length -gt 0) {
$version = "${{ inputs.version_override }}"
} else {
$version = (Select-Xml -Path "./Source/AppScaffolding/AppScaffolding.csproj" -XPath "/Project/PropertyGroup/Version").Node.InnerXML.Trim()
}
"version=$version" >> $env:GITHUB_OUTPUT
- name: Unit test
if: ${{ inputs.run_unit_tests }}
working-directory: ./Source
run: dotnet test
- name: Publish
working-directory: ./Source
run: |
dotnet publish `
Libation${{ matrix.ui }}/Libation${{ matrix.ui }}.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=Libation${{ matrix.ui }}/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
LoadByOS/${{ matrix.os }}ConfigApp/${{ matrix.os }}ConfigApp.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=LoadByOS/${{ matrix.os }}ConfigApp/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
LibationCli/LibationCli.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:DefineConstants="${{ matrix.release_name }}" `
-p:PublishProfile=LibationCli/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
dotnet publish `
Hangover${{ matrix.ui }}/Hangover${{ matrix.ui }}.csproj `
--runtime ${{ env.RUNTIME_ID }} `
--configuration ${{ env.DOTNET_CONFIGURATION }} `
--output bin/Publish/${{ env.OUTPUT_NAME }} `
-p:PublishProfile=Hangover${{ matrix.ui }}/Properties/PublishProfiles/${{ matrix.os }}Profile.pubxml
- name: Zip artifact
id: zip
working-directory: ./Source/bin/Publish
run: |
$bin_dir = "${{ env.OUTPUT_NAME }}\"
$delfiles = @(
"WindowsConfigApp.exe",
"WindowsConfigApp.runtimeconfig.json",
"WindowsConfigApp.deps.json"
)
foreach ($file in $delfiles){ if (test-path $bin_dir$file){ Remove-Item $bin_dir$file } }
$artifact="${{ matrix.prefix }}Libation.${{ steps.get_version.outputs.version }}-" + "${{ matrix.os }}".ToLower() + "-${{ matrix.release_name }}-${{ inputs.architecture }}"
"artifact=$artifact" >> $env:GITHUB_OUTPUT
Compress-Archive -Path "${bin_dir}*" -DestinationPath "$artifact.zip"
- name: Publish artifact
uses: actions/upload-artifact@v4
with:
name: ${{ steps.zip.outputs.artifact }}.zip
path: ./Source/bin/Publish/${{ steps.zip.outputs.artifact }}.zip
if-no-files-found: error
retention-days: 7


@ -1,53 +0,0 @@
# build.yml
# Reusable workflow that builds Libation for all platforms.
---
name: build
on:
workflow_call:
inputs:
version_override:
type: string
description: "Version number override"
required: false
run_unit_tests:
type: boolean
description: "Skip running unit tests"
required: false
default: true
jobs:
windows:
strategy:
matrix:
architecture: [x64]
uses: ./.github/workflows/build-windows.yml
with:
version_override: ${{ inputs.version_override }}
run_unit_tests: ${{ inputs.run_unit_tests }}
architecture: ${{ matrix.architecture }}
linux:
strategy:
matrix:
OS: [Redhat, Debian]
architecture: [x64, arm64]
uses: ./.github/workflows/build-linux.yml
with:
version_override: ${{ inputs.version_override }}
runs_on: ubuntu-latest
OS: ${{ matrix.OS }}
architecture: ${{ matrix.architecture }}
run_unit_tests: ${{ inputs.run_unit_tests }}
macos:
strategy:
matrix:
architecture: [x64, arm64]
uses: ./.github/workflows/build-linux.yml
with:
version_override: ${{ inputs.version_override }}
runs_on: macos-latest
OS: MacOS
architecture: ${{ matrix.architecture }}
run_unit_tests: ${{ inputs.run_unit_tests }}


@ -1,63 +0,0 @@
# docker.yml
# Reusable workflow that builds a docker image for Libation.
---
name: docker
on:
workflow_call:
inputs:
version:
type: string
description: "Version number"
required: true
release:
type: boolean
description: "Is this a release build?"
required: true
secrets:
docker_username:
required: true
docker_token:
required: true
jobs:
build_and_push:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
if: ${{ inputs.release }}
uses: docker/login-action@v3
with:
username: ${{ secrets.docker_username }}
password: ${{ secrets.docker_token }}
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
latest=true
images: |
name=${{ secrets.docker_username }}/libation
tags: |
type=raw,value=${{ inputs.version }},enable=${{ inputs.release }}
- name: Build and push image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64
push: ${{ steps.metadata.outputs.tags != ''}}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}


@ -1,58 +0,0 @@
# release.yml
# Builds and creates the release on any tags starting with a `v`
---
name: release
on:
push:
tags:
- "v*"
jobs:
prerelease:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.get_version.outputs.version }}
steps:
- name: Get tag version
id: get_version
run: |
export TAG="${{ github.ref_name }}"
echo "version=${TAG#v}" >> "${GITHUB_OUTPUT}"
docker:
needs: [prerelease]
uses: ./.github/workflows/docker.yml
with:
version: ${{ needs.prerelease.outputs.version }}
release: true
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
build:
needs: [prerelease]
uses: ./.github/workflows/build.yml
with:
version_override: ${{ needs.prerelease.outputs.version }}
run_unit_tests: false
release:
needs: [prerelease, build]
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v5
with:
path: artifacts
pattern: "*(Classic-)Libation.*"
- name: Release
id: release
uses: softprops/action-gh-release@v2
with:
name: Libation ${{ needs.prerelease.outputs.version }}
body: <Put a body here>
token: ${{ secrets.GITHUB_TOKEN }}
draft: true
prerelease: false
files: |
artifacts/*/*


@ -1,22 +0,0 @@
name: Validate MetaInfo
"on":
pull_request:
branches: ["master"]
paths:
- .github/workflows/validate-appstream-metainfo.yml
- Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
push:
branches: ["master"]
paths:
- .github/workflows/validate-appstream-metainfo.yml
- Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
jobs:
validate-appstream-metainfo:
runs-on: ubuntu-latest
container:
image: ghcr.io/flathub/flatpak-builder-lint:latest
steps:
- uses: actions/checkout@v5
- name: Check the MetaInfo file
run: flatpak-builder-lint appstream Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml


@ -1,21 +0,0 @@
name: Check desktop file
"on":
pull_request:
branches: ["master"]
paths:
- .github/workflows/validate-desktop-file.yml
- Source/LoadByOS/LinuxConfigApp/Libation.desktop
push:
branches: ["master"]
paths:
- .github/workflows/validate-desktop-file.yml
- Source/LoadByOS/LinuxConfigApp/Libation.desktop
jobs:
validate-desktop-file:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- run: sudo apt --yes install desktop-file-utils
- name: Check the desktop file
run: desktop-file-validate Source/LoadByOS/LinuxConfigApp/Libation.desktop


@ -1,22 +0,0 @@
# validate.yml
# Validates that Libation will build on a pull request or push to master.
---
name: validate
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
build:
uses: ./.github/workflows/build.yml
docker:
uses: ./.github/workflows/docker.yml
with:
version: ${GITHUB_SHA}
release: false
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_token: ${{ secrets.DOCKERHUB_TOKEN }}

.gitignore (77 changed lines)

@ -4,7 +4,6 @@
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
@ -13,9 +12,6 @@
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
@ -23,15 +19,10 @@ mono_crash.*
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
@ -45,10 +36,9 @@ Generated\ Files/
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
# NUNIT
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
@ -62,9 +52,7 @@ BenchmarkDotNet.Artifacts/
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
**/Properties/launchSettings.json
# StyleCop
StyleCopReport.xml
@ -72,7 +60,7 @@ StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*_i.h
*.ilk
*.meta
*.obj
@ -89,7 +77,6 @@ StyleCopReport.xml
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
@ -132,6 +119,9 @@ _ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
@ -142,11 +132,6 @@ _TeamCity*
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
@ -184,7 +169,7 @@ publish/
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
#*.pubxml
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
@ -194,8 +179,6 @@ PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
@ -220,14 +203,12 @@ BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
!*.[Cc]ache/
# Others
ClientBin/
@ -240,7 +221,7 @@ ClientBin/
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
@ -271,9 +252,6 @@ ServiceFabricBackup/
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
@ -309,8 +287,12 @@ paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
@ -335,7 +317,7 @@ __pycache__/
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
@ -344,30 +326,5 @@ ASALocalRun/
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
### manually ignored files
# Windows shortcuts
*.lnk
/__TODO.txt
/DataLayer/LibationContext.db
*/bin-Avalonia


@ -1,10 +0,0 @@
{
"WindowsClassic": "Classic-Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-classic-x64\\.zip",
"WindowsAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-chardonnay-x64\\.zip",
"LinuxAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.deb",
"LinuxAvalonia_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.rpm",
"MacOSAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-x64\\.tgz",
"LinuxAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.deb",
"LinuxAvalonia_Arm64_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.rpm",
"MacOSAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-arm64\\.tgz"
}
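These entries appear to be .NET regular expressions matching the release-asset file names produced by the build workflows above. A quick check against a hypothetical artifact name (the version number is made up) might look like this:

using System;
using System.Text.RegularExpressions;

class ArtifactPatternCheck
{
    static void Main()
    {
        // the "WindowsAvalonia" pattern from above, with JSON escaping removed
        var pattern = @"Libation\.\d+\.\d+\.\d+(?:\.\d+)?-win(?:dows)?-chardonnay-x64\.zip";

        // hypothetical artifact name, for illustration only
        var artifact = "Libation.11.3.1-windows-chardonnay-x64.zip";

        Console.WriteLine(Regex.IsMatch(artifact, pattern)); // True
    }
}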

.vscode/launch.json (32 changed lines)

@ -1,32 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (console) Windows",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
"args": [],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "internalConsole"
},
{
"name": ".NET Core Launch (console) Linux",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build_linux",
"program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
"args": [],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "internalConsole"
}
]
}

.vscode/tasks.json (59 changed lines)

@ -1,59 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"dependsOn": [
"build_libation",
"build_linuxconfigapp"
]
},
{
"label": "build_libation",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
},
{
"label": "build_linuxconfigapp",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LoadByOS/LinuxConfigApp/LinuxConfigApp.csproj"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
},
{
"label": "build_linux",
"type": "shell",
"command": "dotnet",
"args": [
"build",
"${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj",
"-p:TargetFramework=net9.0",
"-p:TargetFrameworks=net9.0",
"-p:RuntimeIdentifier=linux-x64"
],
"group": "build",
"presentation": {
//"reveal": "silent"
},
"problemMatcher": "$msCompile"
}
]
}


@ -0,0 +1,123 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<Reference Include="taglib-sharp">
<HintPath>lib\taglib-sharp.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup>
<Folder Include="..\..\..\..\..\..\Dinah%2527s folder\coding\_NET\Visual Studio 2019\Libation\AaxDecrypter\UNTESTED\BytesCrackerLib\" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Dinah.Core\Dinah.Core\Dinah.Core.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="BytesCrackerLib\alglib1.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_0_10000x789935_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_1_10000x791425_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_2_10000x790991_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_3_10000x792120_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_4_10000x790743_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_5_10000x790568_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_6_10000x791458_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_7_10000x791707_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_8_10000x790202_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\audible_byte#4-4_9_10000x791022_0.rtc">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\ffmpeg.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\ffprobe.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="BytesCrackerLib\rcrack.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\AtomicParsley.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avcodec-57.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avdevice-57.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avfilter-6.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avformat-57.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\avutil-55.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygcrypto-1.0.0.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cyggcc_s-1.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygmp4v2-2.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygstdc++-6.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygwin1.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\cygz.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\ffmpeg.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\ffprobe.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\mp4trackdump.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\postproc-54.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\swresample-2.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\swscale-4.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="DecryptLib\taglib-sharp.dll">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

Binary files not shown (23 files).


@ -0,0 +1,355 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Dinah.Core;
using Dinah.Core.Diagnostics;
using Dinah.Core.IO;
using Dinah.Core.StepRunner;
namespace AaxDecrypter
{
public interface ISimpleAaxToM4bConverter
{
event EventHandler<int> DecryptProgressUpdate;
bool Run();
string AppName { get; set; }
string inputFileName { get; }
byte[] coverBytes { get; }
string outDir { get; }
string outputFileName { get; }
Chapters chapters { get; }
Tags tags { get; }
EncodingInfo encodingInfo { get; }
void SetOutputFilename(string outFileName);
}
public interface IAdvancedAaxToM4bConverter : ISimpleAaxToM4bConverter
{
bool Step1_CreateDir();
bool Step2_DecryptAax();
bool Step3_Chapterize();
bool Step4_InsertCoverArt();
bool Step5_Cleanup();
bool Step6_AddTags();
bool End_CreateCue();
bool End_CreateNfo();
}
/// <summary>full c# app. integrated logging. no UI</summary>
public class AaxToM4bConverter : IAdvancedAaxToM4bConverter
{
public event EventHandler<int> DecryptProgressUpdate;
public string inputFileName { get; }
public string decryptKey { get; private set; }
private StepSequence steps { get; }
public byte[] coverBytes { get; private set; }
public string AppName { get; set; } = nameof(AaxToM4bConverter);
public string outDir { get; private set; }
public string outputFileName { get; private set; }
public Chapters chapters { get; private set; }
public Tags tags { get; private set; }
public EncodingInfo encodingInfo { get; private set; }
public static async Task<AaxToM4bConverter> CreateAsync(string inputFile, string decryptKey)
{
var converter = new AaxToM4bConverter(inputFile, decryptKey);
await converter.prelimProcessing();
converter.printPrelim();
return converter;
}
private AaxToM4bConverter(string inputFile, string decryptKey)
{
if (string.IsNullOrWhiteSpace(inputFile)) throw new ArgumentNullException(nameof(inputFile), "Input file may not be null or whitespace");
if (!File.Exists(inputFile)) throw new ArgumentNullException(nameof(inputFile), "File does not exist");
steps = new StepSequence
{
Name = "Convert Aax To M4b",
["Step 1: Create Dir"] = Step1_CreateDir,
["Step 2: Decrypt Aax"] = Step2_DecryptAax,
["Step 3: Chapterize and tag"] = Step3_Chapterize,
["Step 4: Insert Cover Art"] = Step4_InsertCoverArt,
["Step 5: Cleanup"] = Step5_Cleanup,
["Step 6: Add Tags"] = Step6_AddTags,
["End: Create Cue"] = End_CreateCue,
["End: Create Nfo"] = End_CreateNfo
};
this.inputFileName = inputFile;
this.decryptKey = decryptKey;
}
private async Task prelimProcessing()
{
this.tags = new Tags(this.inputFileName);
this.encodingInfo = new EncodingInfo(this.inputFileName);
this.chapters = new Chapters(this.inputFileName, this.tags.duration.TotalSeconds);
var defaultFilename = Path.Combine(
Path.GetDirectoryName(this.inputFileName),
getASCIITag(this.tags.author),
getASCIITag(this.tags.title) + ".m4b"
);
SetOutputFilename(defaultFilename);
await Task.Run(() => saveCover(inputFileName));
}
private string getASCIITag(string property)
{
foreach (char ch in new string(Path.GetInvalidFileNameChars()) + new string(Path.GetInvalidPathChars()))
property = property.Replace(ch.ToString(), "");
return property;
}
private void saveCover(string aaxFile)
{
using var file = TagLib.File.Create(aaxFile, "audio/mp4", TagLib.ReadStyle.Average);
this.coverBytes = file.Tag.Pictures[0].Data.Data;
}
private void printPrelim()
{
Console.WriteLine("Audible Book ID = " + tags.id);
Console.WriteLine("Book: " + tags.title);
Console.WriteLine("Author: " + tags.author);
Console.WriteLine("Narrator: " + tags.narrator);
Console.WriteLine("Year: " + tags.year);
Console.WriteLine("Total Time: "
+ tags.duration.GetTotalTimeFormatted()
+ " in " + chapters.Count() + " chapters");
Console.WriteLine("WARNING-Source is "
+ encodingInfo.originalBitrate + " kbits @ "
+ encodingInfo.sampleRate + "Hz, "
+ encodingInfo.channels + " channels");
}
public bool Run()
{
var (IsSuccess, Elapsed) = steps.Run();
if (!IsSuccess)
{
Console.WriteLine("WARNING-Conversion failed");
return false;
}
var speedup = (int)(tags.duration.TotalSeconds / (long)Elapsed.TotalSeconds);
Console.WriteLine("Speedup is " + speedup + "x realtime.");
Console.WriteLine("Done");
return true;
}
public void SetOutputFilename(string outFileName)
{
this.outputFileName = outFileName;
if (Path.GetExtension(this.outputFileName) != ".m4b")
this.outputFileName = outputFileWithNewExt(".m4b");
this.outDir = Path.GetDirectoryName(this.outputFileName);
}
private string outputFileWithNewExt(string extension)
=> Path.Combine(this.outDir, Path.GetFileNameWithoutExtension(this.outputFileName) + '.' + extension.Trim('.'));
public bool Step1_CreateDir()
{
ProcessRunner.WorkingDir = this.outDir;
Directory.CreateDirectory(this.outDir);
return true;
}
public bool Step2_DecryptAax()
{
DecryptProgressUpdate?.Invoke(this, 0);
var tempRipFile = Path.Combine(this.outDir, "funny.aac");
var fail = "WARNING-Decrypt failure. ";
int returnCode;
if (string.IsNullOrWhiteSpace(decryptKey))
{
returnCode = getKey_decrypt(tempRipFile);
}
else
{
returnCode = decrypt(tempRipFile);
if (returnCode == -99)
{
Console.WriteLine($"{fail}Incorrect decrypt key: {decryptKey}");
this.decryptKey = null;
returnCode = getKey_decrypt(tempRipFile);
}
}
if (returnCode == 100)
Console.WriteLine($"{fail}Thread completed without changing return code. This shouldn't be possible");
else if (returnCode == 0)
{
// success!
FileExt.SafeMove(tempRipFile, outputFileWithNewExt(".mp4"));
DecryptProgressUpdate?.Invoke(this, 100);
return true;
}
else if (returnCode == -99)
Console.WriteLine($"{fail}Incorrect decrypt key: {decryptKey}");
else // any other returnCode
Console.WriteLine($"{fail}Unknown failure code: {returnCode}");
FileExt.SafeDelete(tempRipFile);
DecryptProgressUpdate?.Invoke(this, 0);
return false;
}
private int getKey_decrypt(string tempRipFile)
{
getKey();
return decrypt(tempRipFile);
}
private void getKey()
{
Console.WriteLine("Discovering decrypt key");
Console.WriteLine("Getting file hash");
var checksum = BytesCracker.GetChecksum(inputFileName);
Console.WriteLine("File hash calculated: " + checksum);
Console.WriteLine("Cracking activation bytes");
var activation_bytes = BytesCracker.GetActivationBytes(checksum);
this.decryptKey = activation_bytes;
Console.WriteLine("Activation bytes cracked. Decrypt key: " + activation_bytes);
}
private int decrypt(string tempRipFile)
{
FileExt.SafeDelete(tempRipFile);
Console.WriteLine("Decrypting with key " + decryptKey);
var returnCode = 100;
var thread = new Thread(() => returnCode = this.ngDecrypt());
thread.Start();
double fileLen = new FileInfo(this.inputFileName).Length;
while (thread.IsAlive && returnCode == 100)
{
Thread.Sleep(500);
if (File.Exists(tempRipFile))
{
double tempLen = new FileInfo(tempRipFile).Length;
var percentProgress = tempLen / fileLen * 100.0;
DecryptProgressUpdate?.Invoke(this, (int)percentProgress);
}
}
return returnCode;
}
private int ngDecrypt()
{
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.mp4trackdumpPath,
Arguments = "-c " + this.encodingInfo.channels + " -r " + this.encodingInfo.sampleRate + " \"" + this.inputFileName + "\""
};
info.EnvironmentVariables["VARIABLE"] = decryptKey;
var (output, exitCode) = info.RunHidden();
// bad checksum -- bad decrypt key
if (output.Contains("checksums mismatch, aborting!"))
return -99;
return exitCode;
}
// temp file names for steps 3, 4, 5
string tempChapsPath => Path.Combine(this.outDir, "tempChaps.mp4");
string mp4_file => outputFileWithNewExt(".mp4");
string ff_txt_file => mp4_file + ".ff.txt";
public bool Step3_Chapterize()
{
string str1 = "";
if (this.chapters.FirstChapterStart != 0.0)
{
str1 = " -ss " + this.chapters.FirstChapterStart.ToString("0.000", CultureInfo.InvariantCulture) + " -t " + (this.chapters.LastChapterStart - 1.0).ToString("0.000", CultureInfo.InvariantCulture) + " ";
}
string ffmpegTags = this.tags.GenerateFfmpegTags();
string ffmpegChapters = this.chapters.GenerateFfmpegChapters();
File.WriteAllText(ff_txt_file, ffmpegTags + ffmpegChapters);
var tagAndChapterInfo = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.ffmpegPath,
Arguments = "-y -i \"" + mp4_file + "\" -f ffmetadata -i \"" + ff_txt_file + "\" -map_metadata 1 -bsf:a aac_adtstoasc -c:a copy" + str1 + " -map 0 \"" + tempChapsPath + "\""
};
tagAndChapterInfo.RunHidden();
return true;
}
public bool Step4_InsertCoverArt()
{
// save cover image as temp file
var coverPath = Path.Combine(this.outDir, "cover-" + Guid.NewGuid() + ".jpg");
FileExt.CreateFile(coverPath, this.coverBytes);
var insertCoverArtInfo = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.atomicParsleyPath,
Arguments = "\"" + tempChapsPath + "\" --encodingTool \"" + AppName + "\" --artwork \"" + coverPath + "\" --overWrite"
};
insertCoverArtInfo.RunHidden();
// delete temp file
FileExt.SafeDelete(coverPath);
return true;
}
public bool Step5_Cleanup()
{
FileExt.SafeDelete(mp4_file);
FileExt.SafeDelete(ff_txt_file);
FileExt.SafeMove(tempChapsPath, this.outputFileName);
return true;
}
public bool Step6_AddTags()
{
this.tags.AddAppleTags(this.outputFileName);
return true;
}
public bool End_CreateCue()
{
File.WriteAllText(outputFileWithNewExt(".cue"), this.chapters.GetCuefromChapters(Path.GetFileName(this.outputFileName)));
return true;
}
public bool End_CreateNfo()
{
File.WriteAllText(outputFileWithNewExt(".nfo"), NFO.CreateNfoContents(AppName, this.tags, this.encodingInfo, this.chapters));
return true;
}
}
}
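A minimal driving sketch for the converter above, assuming placeholder file paths; passing a null decrypt key makes Step2_DecryptAax recover the activation bytes via BytesCracker:

using System;
using System.Threading.Tasks;
using AaxDecrypter;

class ConvertExample
{
    static async Task Main()
    {
        // placeholder paths; a null key falls back to key recovery
        var converter = await AaxToM4bConverter.CreateAsync(@"C:\tmp\book.aax", null);
        converter.DecryptProgressUpdate += (_, percent) => Console.WriteLine($"decrypt: {percent}%");
        converter.SetOutputFilename(@"C:\tmp\book.m4b");

        var success = converter.Run();   // runs Step1 through the cue/nfo steps
        Console.WriteLine(success ? "Converted." : "Conversion failed.");
    }
}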


@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Dinah.Core;
using Dinah.Core.Diagnostics;
namespace AaxDecrypter
{
public static class BytesCracker
{
public static string GetChecksum(string aaxPath)
{
var info = new ProcessStartInfo
{
FileName = BytesCrackerSupportLibraries.ffprobePath,
Arguments = aaxPath.SurroundWithQuotes(),
WorkingDirectory = Directory.GetCurrentDirectory()
};
// checksum is in the debug info. ffprobe's debug info is written to stderr, not stdout
var readErrorOutput = true;
var ffprobeStderr = info.RunHidden(readErrorOutput).Output;
// example checksum line:
// ... [aax] file checksum == 0c527840c4f18517157eb0b4f9d6f9317ce60cd1
var checksum = ffprobeStderr.ExtractString("file checksum == ", 40);
return checksum;
}
/// <summary>use checksum to get activation bytes. activation bytes are unique per audible customer. only have to do this 1x/customer</summary>
public static string GetActivationBytes(string checksum)
{
var info = new ProcessStartInfo
{
FileName = BytesCrackerSupportLibraries.rcrackPath,
Arguments = @". -h " + checksum,
WorkingDirectory = Directory.GetCurrentDirectory()
};
var rcrackStdout = info.RunHidden().Output;
// example result
// 0c527840c4f18517157eb0b4f9d6f9317ce60cd1 \xbd\x89X\x09 hex:bd895809
var activation_bytes = rcrackStdout.ExtractString("hex:", 8);
return activation_bytes;
}
}
}
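A short sketch of the key-recovery chain above; the path is a placeholder, and the BytesCrackerLib folder (ffprobe.exe, rcrack.exe, and the .rtc rainbow tables) is assumed to sit next to the executable:

using System;
using AaxDecrypter;

class ActivationBytesExample
{
    static void Main()
    {
        var aaxPath = @"C:\tmp\book.aax";   // placeholder

        var checksum = BytesCracker.GetChecksum(aaxPath);                 // e.g. 0c527840c4f18517...
        var activationBytes = BytesCracker.GetActivationBytes(checksum);  // e.g. bd895809

        Console.WriteLine($"checksum={checksum} activation_bytes={activationBytes}");
    }
}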


@ -0,0 +1,28 @@
using System.IO;
namespace AaxDecrypter
{
public static class BytesCrackerSupportLibraries
{
// GetActivationBytes dependencies
// rcrack.exe
// alglib1.dll
// RainbowCrack files to recover your own Audible activation data (activation_bytes) in an offline manner
// audible_byte#4-4_0_10000x789935_0.rtc
// audible_byte#4-4_1_10000x791425_0.rtc
// audible_byte#4-4_2_10000x790991_0.rtc
// audible_byte#4-4_3_10000x792120_0.rtc
// audible_byte#4-4_4_10000x790743_0.rtc
// audible_byte#4-4_5_10000x790568_0.rtc
// audible_byte#4-4_6_10000x791458_0.rtc
// audible_byte#4-4_7_10000x791707_0.rtc
// audible_byte#4-4_8_10000x790202_0.rtc
// audible_byte#4-4_9_10000x791022_0.rtc
private static string appPath_ { get; } = Path.GetDirectoryName(Dinah.Core.Exe.FileLocationOnDisk);
private static string bytesCrackerLib_ { get; } = Path.Combine(appPath_, "BytesCrackerLib");
public static string ffprobePath { get; } = Path.Combine(bytesCrackerLib_, "ffprobe.exe");
public static string rcrackPath { get; } = Path.Combine(bytesCrackerLib_, "rcrack.exe");
}
}


@ -0,0 +1,95 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Text;
using Dinah.Core.Diagnostics;
namespace AaxDecrypter
{
public class Chapters
{
private List<double> markers { get; }
public double FirstChapterStart => markers[0];
public double LastChapterStart => markers[markers.Count - 1];
public Chapters(string file, double totalTime)
{
this.markers = getAAXChapters(file);
// add end time
this.markers.Add(totalTime);
}
private static List<double> getAAXChapters(string file)
{
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.ffprobePath,
Arguments = "-loglevel panic -show_chapters -print_format xml \"" + file + "\""
};
var xml = info.RunHidden().Output;
var xmlDocument = new System.Xml.XmlDocument();
xmlDocument.LoadXml(xml);
var chapters = xmlDocument.SelectNodes("/ffprobe/chapters/chapter")
.Cast<System.Xml.XmlNode>()
.Select(xmlNode => double.Parse(xmlNode.Attributes["start_time"].Value.Replace(",", "."), CultureInfo.InvariantCulture))
.ToList();
return chapters;
}
// subtract 1 b/c end time marker is a real entry but isn't a real chapter
public int Count() => this.markers.Count - 1;
public string GetCuefromChapters(string fileName)
{
var stringBuilder = new StringBuilder();
if (fileName != "")
{
stringBuilder.Append("FILE \"" + fileName + "\" MP4\n");
}
for (var i = 0; i < Count(); i++)
{
var chapter = i + 1;
var timeSpan = TimeSpan.FromSeconds(this.markers[i]);
var minutes = Math.Floor(timeSpan.TotalMinutes).ToString();
var seconds = timeSpan.Seconds.ToString("D2");
var milliseconds = (timeSpan.Milliseconds / 10).ToString("D2");
string str = minutes + ":" + seconds + ":" + milliseconds;
stringBuilder.Append("TRACK " + chapter + " AUDIO\n");
stringBuilder.Append(" TITLE \"Chapter " + chapter.ToString("D2") + "\"\n");
stringBuilder.Append(" INDEX 01 " + str + "\n");
}
return stringBuilder.ToString();
}
public string GenerateFfmpegChapters()
{
var stringBuilder = new StringBuilder();
for (var i = 0; i < Count(); i++)
{
var chapter = i + 1;
var start = this.markers[i] * 1000.0;
var end = this.markers[i + 1] * 1000.0;
var chapterName = chapter.ToString("D3");
stringBuilder.Append("[CHAPTER]\n");
stringBuilder.Append("TIMEBASE=1/1000\n");
stringBuilder.Append("START=" + start + "\n");
stringBuilder.Append("END=" + end + "\n");
stringBuilder.Append("title=" + chapterName + "\n");
}
return stringBuilder.ToString();
}
}
}
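A usage sketch for the chapter reader above, writing the same cue sheet that End_CreateCue produces; the paths are placeholders and ffprobe is assumed to be present under DecryptLib:

using System.IO;
using AaxDecrypter;

class CueExample
{
    static void Main()
    {
        var aaxPath = @"C:\tmp\book.aax";   // placeholder

        var tags = new Tags(aaxPath);
        var chapters = new Chapters(aaxPath, tags.duration.TotalSeconds);

        // cue entries point at the final container file name
        File.WriteAllText(@"C:\tmp\book.cue", chapters.GetCuefromChapters("book.m4b"));
    }
}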


@ -0,0 +1,21 @@
using System.IO;
namespace AaxDecrypter
{
public static class DecryptSupportLibraries
{
// OTHER EXTERNAL DEPENDENCIES
// ffprobe has these pre-req.s as I'm using it:
// avcodec-57.dll, avdevice-57.dll, avfilter-6.dll, avformat-57.dll, avutil-55.dll, postproc-54.dll, swresample-2.dll, swscale-4.dll, taglib-sharp.dll
//
// something else needs the cygwin files (cyg*.dll)
private static string appPath_ { get; } = Path.GetDirectoryName(Dinah.Core.Exe.FileLocationOnDisk);
private static string decryptLib_ { get; } = Path.Combine(appPath_, "DecryptLib");
public static string ffmpegPath { get; } = Path.Combine(decryptLib_, "ffmpeg.exe");
public static string ffprobePath { get; } = Path.Combine(decryptLib_, "ffprobe.exe");
public static string atomicParsleyPath { get; } = Path.Combine(decryptLib_, "AtomicParsley.exe");
public static string mp4trackdumpPath { get; } = Path.Combine(decryptLib_, "mp4trackdump.exe");
}
}


@ -0,0 +1,41 @@
using System;
using System.Diagnostics;
using Dinah.Core.Diagnostics;
namespace AaxDecrypter
{
public class EncodingInfo
{
public int sampleRate { get; } = 44100;
public int channels { get; } = 2;
public int originalBitrate { get; }
public EncodingInfo(string file)
{
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.ffprobePath,
Arguments = "-loglevel panic -show_streams -print_format flat \"" + file + "\""
};
var end = info.RunHidden().Output;
foreach (string str2 in end.Split('\n'))
{
string[] strArray = str2.Split('=');
switch (strArray[0])
{
case "streams.stream.0.channels":
this.channels = int.Parse(strArray[1].Replace("\"", "").TrimEnd('\r', '\n'));
break;
case "streams.stream.0.sample_rate":
this.sampleRate = int.Parse(strArray[1].Replace("\"", "").TrimEnd('\r', '\n'));
break;
case "streams.stream.0.bit_rate":
string s = strArray[1].Replace("\"", "").TrimEnd('\r', '\n');
this.originalBitrate = (int)Math.Round(double.Parse(s) / 1000.0, MidpointRounding.AwayFromZero);
break;
}
}
}
}
}


@ -0,0 +1,56 @@
namespace AaxDecrypter
{
public static class NFO
{
public static string CreateNfoContents(string ripper, Tags tags, EncodingInfo encodingInfo, Chapters chapters)
{
int _hours = (int)tags.duration.TotalHours;
string myDuration
= (_hours > 0 ? _hours + " hours, " : "")
+ tags.duration.Minutes + " minutes, "
+ tags.duration.Seconds + " seconds";
string str4
= "General Information\r\n"
+ "===================\r\n"
+ " Title: " + tags.title + "\r\n"
+ " Author: " + tags.author + "\r\n"
+ " Read By: " + tags.narrator + "\r\n"
+ " Copyright: " + tags.year + "\r\n"
+ " Audiobook Copyright: " + tags.year + "\r\n";
if (tags.genre != "")
{
str4 = str4 + " Genre: " + tags.genre + "\r\n";
}
string s
= str4
+ " Publisher: " + tags.publisher + "\r\n"
+ " Duration: " + myDuration + "\r\n"
+ " Chapters: " + chapters.Count() + "\r\n"
+ "\r\n"
+ "\r\n"
+ "Media Information\r\n"
+ "=================\r\n"
+ " Source Format: Audible AAX\r\n"
+ " Source Sample Rate: " + encodingInfo.sampleRate + " Hz\r\n"
+ " Source Channels: " + encodingInfo.channels + "\r\n"
+ " Source Bitrate: " + encodingInfo.originalBitrate + " kbits\r\n"
+ "\r\n"
+ " Lossless Encode: Yes\r\n"
+ " Encoded Codec: AAC / M4B\r\n"
+ " Encoded Sample Rate: " + encodingInfo.sampleRate + " Hz\r\n"
+ " Encoded Channels: " + encodingInfo.channels + "\r\n"
+ " Encoded Bitrate: " + encodingInfo.originalBitrate + " kbits\r\n"
+ "\r\n"
+ " Ripper: " + ripper + "\r\n"
+ "\r\n"
+ "\r\n"
+ "Book Description\r\n"
+ "================\r\n"
+ tags.comments;
return s;
}
}
}


@ -0,0 +1,74 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Text;
using TagLib;
using TagLib.Mpeg4;
using Dinah.Core;
namespace AaxDecrypter
{
public class Tags
{
public string title { get; }
public string album { get; }
public string author { get; }
public string comments { get; }
public string narrator { get; }
public string year { get; }
public string publisher { get; }
public string id { get; }
public string genre { get; }
public TimeSpan duration { get; }
public Tags(string file)
{
using TagLib.File tagLibFile = TagLib.File.Create(file, "audio/mp4", ReadStyle.Average);
this.title = tagLibFile.Tag.Title.Replace(" (Unabridged)", "");
this.album = tagLibFile.Tag.Album.Replace(" (Unabridged)", "");
this.author = tagLibFile.Tag.FirstPerformer;
this.year = tagLibFile.Tag.Year.ToString();
this.comments = tagLibFile.Tag.Comment;
this.duration = tagLibFile.Properties.Duration;
this.genre = tagLibFile.Tag.FirstGenre;
var tag = tagLibFile.GetTag(TagTypes.Apple, true);
this.publisher = tag.Publisher;
this.narrator = string.IsNullOrWhiteSpace(tagLibFile.Tag.FirstComposer) ? tag.Narrator : tagLibFile.Tag.FirstComposer;
this.comments = !string.IsNullOrWhiteSpace(tag.LongDescription) ? tag.LongDescription : tag.Description;
this.id = tag.AudibleCDEK;
}
public void AddAppleTags(string file)
{
using var file1 = TagLib.File.Create(file, "audio/mp4", ReadStyle.Average);
var tag = (AppleTag)file1.GetTag(TagTypes.Apple, true);
tag.Publisher = this.publisher;
tag.LongDescription = this.comments;
tag.Description = this.comments;
file1.Save();
}
public string GenerateFfmpegTags()
{
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.Append(";FFMETADATA1\n");
stringBuilder.Append("major_brand=aax\n");
stringBuilder.Append("minor_version=1\n");
stringBuilder.Append("compatible_brands=aax M4B mp42isom\n");
stringBuilder.Append("date=" + this.year + "\n");
stringBuilder.Append("genre=" + this.genre + "\n");
stringBuilder.Append("title=" + this.title + "\n");
stringBuilder.Append("artist=" + this.author + "\n");
stringBuilder.Append("album=" + this.album + "\n");
stringBuilder.Append("composer=" + this.narrator + "\n");
stringBuilder.Append("comment=" + this.comments.Truncate(254) + "\n");
stringBuilder.Append("description=" + this.comments + "\n");
return stringBuilder.ToString();
}
}
}
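A small sketch reading the Apple/MP4 tags above and printing the ffmetadata header that Step3_Chapterize writes to the .ff.txt file; the path is a placeholder:

using System;
using AaxDecrypter;

class TagsExample
{
    static void Main()
    {
        var tags = new Tags(@"C:\tmp\book.aax");   // placeholder path

        Console.WriteLine($"{tags.title} by {tags.author}, read by {tags.narrator} ({tags.duration})");

        // the ;FFMETADATA1 block fed to ffmpeg with "-f ffmetadata"
        Console.WriteLine(tags.GenerateFfmpegTags());
    }
}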


@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\audible api\AudibleApi\AudibleApiDTOs\AudibleApiDTOs.csproj" />
<ProjectReference Include="..\..\audible api\AudibleApi\AudibleApi\AudibleApi.csproj" />
<ProjectReference Include="..\DtoImporterService\DtoImporterService.csproj" />
<ProjectReference Include="..\InternalUtilities\InternalUtilities.csproj" />
<ProjectReference Include="..\LibationSearchEngine\LibationSearchEngine.csproj" />
</ItemGroup>
</Project>


@ -0,0 +1,25 @@
using System;
using System.Threading.Tasks;
using AudibleApi;
using DtoImporterService;
using InternalUtilities;
namespace ApplicationService
{
public class LibraryIndexer
{
public async Task<(int totalCount, int newCount)> IndexAsync(ILoginCallback callback)
{
var audibleApiActions = new AudibleApiActions();
var items = await audibleApiActions.GetAllLibraryItemsAsync(callback);
var totalCount = items.Count;
var libImporter = new LibraryImporter();
var newCount = await Task.Run(() => libImporter.Import(items));
await SearchEngineActions.FullReIndexAsync();
return (totalCount, newCount);
}
}
}
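A hypothetical caller for the indexer above; the ILoginCallback instance would come from whichever UI layer handles the Audible login, so it is taken here as a parameter rather than constructed:

using System;
using System.Threading.Tasks;
using ApplicationService;
using AudibleApi;

static class LibraryIndexerExample
{
    public static async Task ImportAsync(ILoginCallback loginCallback)
    {
        var indexer = new LibraryIndexer();
        var (totalCount, newCount) = await indexer.IndexAsync(loginCallback);

        Console.WriteLine($"Library scan: {totalCount} items total, {newCount} new.");
    }
}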


@ -0,0 +1,26 @@
using System.Threading.Tasks;
using DataLayer;
namespace ApplicationService
{
public static class SearchEngineActions
{
public static async Task FullReIndexAsync()
{
var engine = new LibationSearchEngine.SearchEngine();
await engine.CreateNewIndexAsync().ConfigureAwait(false);
}
public static void UpdateBookTags(Book book)
{
var engine = new LibationSearchEngine.SearchEngine();
engine.UpdateTags(book.AudibleProductId, book.UserDefinedItem.Tags);
}
public static async Task ProductReIndexAsync(string productId)
{
var engine = new LibationSearchEngine.SearchEngine();
await engine.UpdateBookAsync(productId).ConfigureAwait(false);
}
}
}


@ -0,0 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\FileManager\FileManager.csproj" />
</ItemGroup>
</Project>


@ -0,0 +1,66 @@
using System;
using System.Linq;
using Dinah.Core;
namespace AudibleDotCom
{
public enum AudiblePageType
{
ProductDetails = 1,
Library = 2
}
public static class AudiblePageExt
{
public static AudiblePage GetAudiblePageRobust(this AudiblePageType audiblePage) => AudiblePage.FromPageType(audiblePage);
}
public abstract partial class AudiblePage : Enumeration<AudiblePage>
{
// useful for generic classes:
// public abstract class PageScraper<T> where T : AudiblePageRobust {
// public AudiblePage AudiblePage => AudiblePageRobust.GetAudiblePageFromType(typeof(T));
public static AudiblePageType GetAudiblePageFromType(Type audiblePageRobustType)
=> (AudiblePageType)GetAll().Single(t => t.GetType() == audiblePageRobustType).Id;
public AudiblePageType AudiblePageType { get; }
protected AudiblePage(AudiblePageType audiblePage, string abbreviation) : base((int)audiblePage, abbreviation) => AudiblePageType = audiblePage;
public static AudiblePage FromPageType(AudiblePageType audiblePage) => FromValue((int)audiblePage);
/// <summary>For pages which need a param, the param is marked with {0}</summary>
protected abstract string Url { get; }
public string GetUrl(string id) => string.Format(Url, id);
public string Abbreviation => DisplayName;
}
public abstract partial class AudiblePage : Enumeration<AudiblePage>
{
public static AudiblePage Library { get; } = LibraryPage.Instance;
public class LibraryPage : AudiblePage
{
#region singleton stuff
public static LibraryPage Instance { get; } = new LibraryPage();
static LibraryPage() { }
private LibraryPage() : base(AudiblePageType.Library, "LIB") { }
#endregion
protected override string Url => "http://www.audible.com/lib";
}
}
public abstract partial class AudiblePage : Enumeration<AudiblePage>
{
public static AudiblePage Product { get; } = ProductDetailPage.Instance;
public class ProductDetailPage : AudiblePage
{
#region singleton stuff
public static ProductDetailPage Instance { get; } = new ProductDetailPage();
static ProductDetailPage() { }
private ProductDetailPage() : base(AudiblePageType.ProductDetails, "PD") { }
#endregion
protected override string Url => "http://www.audible.com/pd/{0}";
}
}
}
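The enumeration above resolves page types to URLs; a quick sketch of how the two pages format their URLs (the product id is made up):

using System;
using AudibleDotCom;

class AudiblePageExample
{
    static void Main()
    {
        // "B07B4LFL3Y" is a made-up product id, used only to show URL formatting
        Console.WriteLine(AudiblePage.Product.GetUrl("B07B4LFL3Y")); // http://www.audible.com/pd/B07B4LFL3Y
        Console.WriteLine(AudiblePage.Library.GetUrl(null));         // http://www.audible.com/lib

        Console.WriteLine(AudiblePageType.Library.GetAudiblePageRobust().Abbreviation); // LIB
    }
}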


@ -0,0 +1,43 @@
using FileManager;
namespace AudibleDotCom
{
public class AudiblePageSource
{
public AudiblePageType AudiblePage { get; }
public string Source { get; }
public string PageId { get; }
public AudiblePageSource(AudiblePageType audiblePage, string source, string pageId)
{
AudiblePage = audiblePage;
Source = source;
PageId = pageId;
}
/// <summary>declawed allows local file to safely be reloaded in chrome
/// NOTE ABOUT DECLAWED FILES
/// making them safer also breaks functionality
/// eg: previously hidden parts become visible. this changes how selenium can parse pages.
/// hidden elements don't expose .Text property</summary>
public AudiblePageSource Declawed() => new AudiblePageSource(AudiblePage, FileUtility.Declaw(Source), PageId);
public string Serialized() => $"<!-- |{AudiblePage.GetAudiblePageRobust().Abbreviation}|{(PageId ?? "").Trim()}| -->\r\n" + Source;
public static AudiblePageSource Deserialize(string serializedSource)
{
var endOfLine1 = serializedSource.IndexOf('\n');
var parameters = serializedSource
.Substring(0, endOfLine1)
.Split('|');
var abbrev = parameters[1];
var pageId = parameters[2];
var source = serializedSource.Substring(endOfLine1 + 1);
var audiblePage = AudibleDotCom.AudiblePage.FromDisplayName(abbrev).AudiblePageType;
return new AudiblePageSource(audiblePage, source, pageId);
}
}
}
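A round-trip sketch for the serializer above; the page id and HTML are placeholders:

using System;
using AudibleDotCom;

class PageSourceExample
{
    static void Main()
    {
        var original = new AudiblePageSource(AudiblePageType.ProductDetails, "<html>...</html>", "B07B4LFL3Y");

        // header line becomes: <!-- |PD|B07B4LFL3Y| -->
        var serialized = original.Serialized();

        var restored = AudiblePageSource.Deserialize(serialized);
        Console.WriteLine(restored.AudiblePage);   // ProductDetails
        Console.WriteLine(restored.PageId);        // B07B4LFL3Y
    }
}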


@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Selenium.Support" Version="3.141.0" />
<PackageReference Include="Selenium.WebDriver" Version="3.141.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\AudibleDotCom\AudibleDotCom.csproj" />
<ProjectReference Include="..\CookieMonster\CookieMonster.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="chromedriver.exe">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>


@ -0,0 +1,184 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AudibleDotCom;
using Dinah.Core.Humanizer;
using OpenQA.Selenium;
using OpenQA.Selenium.Chrome;
using OpenQA.Selenium.Support.UI;
namespace AudibleDotComAutomation
{
/// <summary>browser manipulation. web driver access
/// browser operators. create and store web driver, browser navigation which can vary depending on whether anon or auth'd
///
/// this base class: is online. no auth. used for most pages. retain no chrome cookies</summary>
public abstract class SeleniumRetriever : IPageRetriever
{
#region // chrome driver details
/*
HIDING CHROME CONSOLE WINDOW
hiding chrome console window has proven to cause more headaches than it solves. here's how to do it though:
// can also use CreateDefaultService() overloads to specify driver path and/or file name
var chromeDriverService = ChromeDriverService.CreateDefaultService();
chromeDriverService.HideCommandPromptWindow = true;
return new ChromeDriver(chromeDriverService, options);
HEADLESS CHROME
this WOULD be how to do headless. but amazon/audible are far too tricksy about their changes and anti-scraping measures
which renders 'headless' mode useless
var options = new ChromeOptions();
options.AddArgument("--headless");
SPECIFYING DRIVER LOCATION
if continues to have trouble finding driver:
var driver = new ChromeDriver(@"C:\my\path\to\chromedriver\directory");
var chromeDriverService = ChromeDriverService.CreateDefaultService(@"C:\my\path\to\chromedriver\directory");
*/
#endregion
protected IWebDriver Driver { get; }
Humanizer humanizer { get; } = new Humanizer();
protected SeleniumRetriever()
{
Driver = new ChromeDriver(ctorCreateChromeOptions());
}
/// <summary>no auth. retain no chrome cookies</summary>
protected virtual ChromeOptions ctorCreateChromeOptions() => new ChromeOptions();
protected async Task AudibleLinkClickAsync(IWebElement element)
{
// EACH CALL to audible should have a small random wait to reduce chances of scrape detection
await humanizer.Wait();
await Task.Run(() => Driver.Click(element));
await waitForSpinnerAsync();
// sometimes these clicks just take a while. add a few more seconds
await Task.Delay(5000);
}
By spinnerLocator { get; } = By.Id("library-main-overlay");
private async Task waitForSpinnerAsync()
{
// if loading overlay w/spinner exists: pause, wait for it to end
await Task.Delay(100);
if (Driver.FindElements(spinnerLocator).Count > 0)
new WebDriverWait(Driver, TimeSpan.FromSeconds(60))
.Until(ExpectedConditions.InvisibilityOfElementLocated(spinnerLocator));
}
private bool isFirstRun = true;
protected virtual async Task FirstRunAsync()
{
// load with no beginning wait. then wait 7 seconds to allow for page flicker. it usually happens after ~5 seconds. can happen irrespective of login state
await Task.Run(() => Driver.Navigate().GoToUrl("http://www.audible.com/"));
await Task.Delay(7000);
}
public async Task<IEnumerable<AudiblePageSource>> GetPageSourcesAsync(AudiblePageType audiblePage, string pageId = null)
{
if (isFirstRun)
{
await FirstRunAsync();
isFirstRun = false;
}
await initFirstPageAsync(audiblePage, pageId);
return await processUrl(audiblePage, pageId);
}
private async Task initFirstPageAsync(AudiblePageType audiblePage, string pageId)
{
// EACH CALL to audible should have a small random wait to reduce chances of scrape detection
await humanizer.Wait();
var url = audiblePage.GetAudiblePageRobust().GetUrl(pageId);
await Task.Run(() => Driver.Navigate().GoToUrl(url));
await waitForSpinnerAsync();
}
private async Task<IEnumerable<AudiblePageSource>> processUrl(AudiblePageType audiblePage, string pageId)
{
var pageSources = new List<AudiblePageSource>();
do
{
pageSources.Add(new AudiblePageSource(audiblePage, Driver.PageSource, pageId));
}
while (await hasMorePagesAsync());
return pageSources;
}
#region has more pages
/// <summary>if no more pages, return false. else, navigate to next page and return true</summary>
private async Task<bool> hasMorePagesAsync()
{
var next = //old_hasMorePages() ??
new_hasMorePages();
if (next == null)
return false;
await AudibleLinkClickAsync(next);
return true;
}
private IWebElement old_hasMorePages()
{
var parentElements = Driver.FindElements(By.ClassName("adbl-page-next"));
if (parentElements.Count == 0)
return null;
var childElements = parentElements[0].FindElements(By.LinkText("NEXT"));
if (childElements.Count != 1)
return null;
return childElements[0];
}
// ~ oct 2017
private IWebElement new_hasMorePages()
{
// get all active/enabled navigation links
var pageNavLinks = Driver.FindElements(By.ClassName("library-load-page"));
if (pageNavLinks.Count == 0)
return null;
// get the right chevron only if it's active.
// note: there are also right chevrons which are not for wish list navigation, which is why we first filter by library-load-page
var nextLink = pageNavLinks
.Where(p => p.FindElements(By.ClassName("bc-icon-chevron-right")).Count > 0)
.ToList(); // cut-off delayed execution
if (nextLink.Count == 0)
return null;
return nextLink.Single().FindElement(By.TagName("button"));
}
#endregion
#region IDisposable pattern
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (disposing && Driver != null)
{
// Quit() does cleanup AND disposes
Driver.Quit();
}
}
#endregion
}
}

View File

@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenQA.Selenium;
namespace AudibleDotComAutomation
{
/// <summary>for user collections: lib, WL</summary>
public abstract class AuthSeleniumRetriever : SeleniumRetriever
{
protected bool IsLoggedIn => GetListenerPageLink() != null;
// needed?
protected AuthSeleniumRetriever() : base() { }
protected IWebElement GetListenerPageLink()
{
var listenerPageElement = Driver.FindElements(By.XPath("//a[contains(@href, '/review-by-author')]"));
if (listenerPageElement.Count > 0)
return listenerPageElement[0];
return null;
}
}
}

View File

@ -0,0 +1,130 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using AudibleDotCom;
using CookieMonster;
using Dinah.Core;
using Dinah.Core.Humanizer;
namespace AudibleDotComAutomation
{
public class BrowserlessRetriever : IPageRetriever
{
Humanizer humanizer { get; } = new Humanizer();
public async Task<IEnumerable<AudiblePageSource>> GetPageSourcesAsync(AudiblePageType audiblePage, string pageId = null)
{
switch (audiblePage)
{
case AudiblePageType.Library: return await getLibraryPageSourcesAsync();
default: throw new NotImplementedException();
}
}
private async Task<IEnumerable<AudiblePageSource>> getLibraryPageSourcesAsync()
{
var collection = new List<AudiblePageSource>();
var cookies = await getAudibleCookiesAsync();
var currPageNum = 1;
bool hasMorePages;
do
{
// EACH CALL to audible should have a small random wait to reduce chances of scrape detection
await humanizer.Wait();
var html = await getLibraryPageAsync(cookies, currPageNum);
var pageSource = new AudiblePageSource(AudiblePageType.Library, html, null);
collection.Add(pageSource);
hasMorePages = getHasMorePages(pageSource.Source);
currPageNum++;
} while (hasMorePages);
return collection;
}
private static async Task<CookieContainer> getAudibleCookiesAsync()
{
var liveCookies = await CookiesHelper.GetLiveCookieValuesAsync();
var audibleCookies = liveCookies.Where(c
=> c.Domain.ContainsInsensitive("audible.com")
|| c.Domain.ContainsInsensitive("adbl")
|| c.Domain.ContainsInsensitive("amazon.com"))
.ToList();
var cookies = new CookieContainer();
foreach (var c in audibleCookies)
cookies.Add(new Cookie(c.Name, c.Value, "/", c.Domain));
return cookies;
}
private static bool getHasMorePages(string html)
{
var doc = new HtmlAgilityPack.HtmlDocument();
doc.LoadHtml(html);
// final page, invalid page:
// <span class="bc-button
// bc-button-secondary
// nextButton
// bc-button-disabled">
// only page: ???
// has more pages:
// <span class="bc-button
// bc-button-secondary
// refinementFormButton
// nextButton">
var next_active_link = doc
.DocumentNode
.Descendants()
.FirstOrDefault(n =>
n.HasClass("nextButton") &&
!n.HasClass("bc-button-disabled"));
return next_active_link != null;
}
private static async Task<string> getLibraryPageAsync(CookieContainer cookies, int pageNum)
{
#region // POST example (from 2017 ajax)
// var destination = "https://www.audible.com/lib-ajax";
// var webRequest = (HttpWebRequest)WebRequest.Create(destination);
// webRequest.Method = "POST";
// webRequest.Accept = "*/*";
// webRequest.AllowAutoRedirect = false;
// webRequest.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; .NET CLR 1.0.3705)";
// webRequest.ContentType = "application/x-www-form-urlencoded; charset=UTF-8";
// webRequest.Credentials = null;
//
// webRequest.CookieContainer = new CookieContainer();
// webRequest.CookieContainer.Add(cookies.GetCookies(new Uri(destination)));
//
// var postData = $"progType=all&timeFilter=all&itemsPerPage={itemsPerPage}&searchTerm=&searchType=&sortColumn=&sortType=down&page={pageNum}&mode=normal&subId=&subTitle=";
// var data = Encoding.UTF8.GetBytes(postData);
// webRequest.ContentLength = data.Length;
// using var dataStream = webRequest.GetRequestStream();
// dataStream.Write(data, 0, data.Length);
#endregion
var destination = "https://" + $"www.audible.com/lib?purchaseDateFilter=all&programFilter=all&sortBy=PURCHASE_DATE.dsc&page={pageNum}";
var webRequest = (HttpWebRequest)WebRequest.Create(destination);
webRequest.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; .NET CLR 1.0.3705)";
webRequest.CookieContainer = new CookieContainer();
webRequest.CookieContainer.Add(cookies.GetCookies(new Uri(destination)));
using var webResponse = await webRequest.GetResponseAsync();
using var reader = new StreamReader(webResponse.GetResponseStream());
return reader.ReadToEnd();
}
public void Dispose() { }
}
}

View File

@ -0,0 +1,75 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenQA.Selenium;
using OpenQA.Selenium.Support.UI;
namespace AudibleDotComAutomation
{
/// <summary>online. get auth by logging in with provided username and password
/// retain no chrome cookies. enter user + pw login</summary>
public class ManualLoginSeleniumRetriever : AuthSeleniumRetriever
{
string _username;
string _password;
public ManualLoginSeleniumRetriever(string username, string password) : base()
{
_username = username;
_password = password;
}
protected override async Task FirstRunAsync()
{
await base.FirstRunAsync();
// can't extract this into AuthSeleniumRetriever ctor. can't use username/pw until prev ctors are complete
// click login link
await AudibleLinkClickAsync(getLoginLink());
// wait until login page loads
new WebDriverWait(Driver, TimeSpan.FromSeconds(60)).Until(ExpectedConditions.ElementIsVisible(By.Id("ap_email")));
// insert credentials
Driver
.FindElement(By.Id("ap_email"))
.SendKeys(_username);
Driver
.FindElement(By.Id("ap_password"))
.SendKeys(_password);
// submit
var submitElement
= Driver.FindElements(By.Id("signInSubmit")).FirstOrDefault()
?? Driver.FindElement(By.Id("signInSubmit-input"));
await AudibleLinkClickAsync(submitElement);
// wait until audible page loads
new WebDriverWait(Driver, TimeSpan.FromSeconds(60))
.Until(d => GetListenerPageLink());
if (!IsLoggedIn)
throw new Exception("not logged in");
}
private IWebElement getLoginLink()
{
{
var loginLinkElements1 = Driver.FindElements(By.XPath("//a[contains(@href, '/signin')]"));
if (loginLinkElements1.Any())
return loginLinkElements1[0];
}
//
// ADD ADDITIONAL ACCEPTABLE PATTERNS HERE
//
//{
// var loginLinkElements2 = Driver.FindElements(By.XPath("//a[contains(@href, '/signin')]"));
// if (loginLinkElements2.Any())
// return loginLinkElements2[0];
//}
throw new NotFoundException("Cannot locate login link");
}
}
}

View File

@ -0,0 +1,38 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenQA.Selenium.Chrome;
namespace AudibleDotComAutomation
{
/// <summary>online. load auth, cookies etc from user data</summary>
public class UserDataSeleniumRetriever : AuthSeleniumRetriever
{
public UserDataSeleniumRetriever() : base()
{
// can't extract this into AuthSeleniumRetriever ctor. can't check login state until prev ctors are complete
if (!IsLoggedIn)
throw new Exception("not logged in");
}
/// <summary>Use current user data/chrome cookies. DO NOT use if chrome is already open</summary>
protected override ChromeOptions ctorCreateChromeOptions()
{
var options = base.ctorCreateChromeOptions();
// load user data incl cookies. default on windows:
// %LOCALAPPDATA%\Google\Chrome\User Data
// C:\Users\username\AppData\Local\Google\Chrome\User Data
var chromeDefaultWindowsUserDataDir = System.IO.Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
"Google",
"Chrome",
"User Data");
options.AddArguments($"user-data-dir={chromeDefaultWindowsUserDataDir}");
return options;
}
}
}

View File

@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using AudibleDotCom;
namespace AudibleDotComAutomation
{
public interface IPageRetriever : IDisposable
{
Task<IEnumerable<AudiblePageSource>> GetPageSourcesAsync(AudiblePageType audiblePage, string pageId = null);
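// usage sketch (hypothetical call site; BrowserlessRetriever and the Selenium-based retrievers both implement this interface):
//   using IPageRetriever retriever = new BrowserlessRetriever();
//   var sources = await retriever.GetPageSourcesAsync(AudiblePageType.Library);
//   foreach (var source in sources)
//       parse(source.Source);   // 'parse' is a placeholder for whatever consumes the html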
}
}

View File

@ -0,0 +1,115 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using OpenQA.Selenium;
using OpenQA.Selenium.Support.UI;
namespace AudibleDotComAutomation.Examples
{
public class SeleniumExamples
{
public IWebDriver Driver { get; set; }
IWebElement GetListenerPageLink()
{
var listenerPageElement = Driver.FindElements(By.XPath("//a[contains(@href, '/review-by-author')]"));
if (listenerPageElement.Count > 0)
return listenerPageElement[0];
return null;
}
void wait_examples()
{
new WebDriverWait(Driver, TimeSpan.FromSeconds(60))
.Until(ExpectedConditions.ElementIsVisible(By.Id("mast-member-acct-name")));
new WebDriverWait(Driver, TimeSpan.FromSeconds(60))
.Until(d => GetListenerPageLink());
// https://stackoverflow.com/questions/21339339/how-to-add-custom-expectedconditions-for-selenium
new WebDriverWait(Driver, TimeSpan.FromSeconds(60))
.Until((d) =>
{
// could be refactored into OR, AND per the java selenium library
// check 1
var e1 = Driver.FindElements(By.Id("mast-member-acct-name"));
if (e1.Count > 0)
return e1[0];
// check 2
var e2 = Driver.FindElements(By.Id("header-account-info-0"));
if (e2.Count > 0)
return e2[0];
return null;
});
}
void XPath_examples()
{
// <tr>
// <td>1</td>
// <td>2</td>
// </tr>
// <tr>
// <td>3</td>
// <td>4</td>
// </tr>
ReadOnlyCollection<IWebElement> all_tr = Driver.FindElements(By.XPath("/tr"));
IWebElement first_tr = Driver.FindElement(By.XPath("/tr"));
IWebElement second_tr = Driver.FindElement(By.XPath("/tr[2]"));
// beginning with a single / starts from root
IWebElement ERROR_not_at_root = Driver.FindElement(By.XPath("/td"));
// 2 slashes searches all, NOT just descendants
IWebElement td1 = Driver.FindElement(By.XPath("//td"));
// 2 slashes still searches all, NOT just descendants
IWebElement still_td1 = first_tr.FindElement(By.XPath("//td"));
// dot operator starts from current node specified by first_tr
// dot + double slash: descendant at any depth
IWebElement td1_descendant = first_tr.FindElement(By.XPath(
".//td"));
// dot + single slash: immediate child only
IWebElement td1_child = first_tr.FindElement(By.XPath(
"./td"));
// <input type="hidden" name="asin" value="ABCD1234">
IWebElement find_anywhere_in_doc = first_tr.FindElement(By.XPath(
"//input[@name='asin']"));
IWebElement find_in_subsection = first_tr.FindElement(By.XPath(
".//input[@name='asin']"));
// search entire page. useful for:
// - RulesLocator to find something that only appears once on the page
// - non-list pages. eg: product details
var onePerPageRules = new RuleFamily
{
RowsLocator = By.XPath("/*"), // search entire page
Rules = new RuleSet {
(row, productItem) => productItem.CustomerId = row.FindElement(By.XPath("//input[@name='cust_id']")).GetValue(),
(row, productItem) => productItem.UserName = row.FindElement(By.XPath("//input[@name='user_name']")).GetValue()
}
};
// - applying conditionals to entire page
var ruleFamily = new RuleFamily
{
RowsLocator = By.XPath("//*[starts-with(@id,'adbl-library-content-row-')]"),
// Rules = getRuleSet()
};
}
#region Rules classes stubs
public class RuleFamily { public By RowsLocator; public IRuleClass Rules; }
public interface IRuleClass { }
public class RuleSet : IRuleClass, IEnumerable<IRuleClass>
{
public void Add(IRuleClass ruleClass) { }
public void Add(RuleAction action) { }
public IEnumerator<IRuleClass> GetEnumerator() => throw new NotImplementedException();
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() => throw new NotImplementedException();
}
public delegate void RuleAction(IWebElement row, ProductItem productItem);
public class ProductItem { public string CustomerId; public string UserName; }
#endregion
}
}

View File

@ -0,0 +1,47 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenQA.Selenium;
using OpenQA.Selenium.Interactions;
namespace AudibleDotComAutomation
{
public static class IWebElementExt
{
// allows getting Text from elements even if hidden
// (the built-in webElement.Text only works on visible elements)
// http://yizeng.me/2014/04/08/get-text-from-hidden-elements-using-selenium-webdriver/#c-sharp
public static string GetText(this IWebElement webElement) => webElement.GetAttribute("textContent");
public static string GetValue(this IWebElement webElement) => webElement.GetAttribute("value");
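// usage sketch (hypothetical element id):
//   var hidden = driver.FindElement(By.Id("some-hidden-input"));
//   var displayed = hidden.Text;      // empty when the element isn't rendered
//   var actual = hidden.GetText();    // textContent, returned even while hidden
//   var val = hidden.GetValue();      // the element's "value" attribute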
}
public static class IWebDriverExt
{
/// <summary>Use this instead of element.Click() to ensure that the element is clicked even if it's not currently scrolled into view</summary>
public static void Click(this IWebDriver driver, IWebElement element)
{
// from: https://stackoverflow.com/questions/12035023/selenium-webdriver-cant-click-on-a-link-outside-the-page
//// this works but isn't really the same
//element.SendKeys(Keys.Enter);
//// didn't work for me
//new Actions(driver)
// .MoveToElement(element)
// .Click()
// .Build()
// .Perform();
driver.ScrollIntoView(element);
element.Click();
}
public static void ScrollIntoView(this IWebDriver driver, IWebElement element)
=> ((IJavaScriptExecutor)driver).ExecuteScript($"window.scroll({element.Location.X}, {element.Location.Y})");
}
}

Binary file not shown.

View File

@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Data.SQLite.Core" Version="1.0.112" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.6.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\FileManager\FileManager.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,66 @@
using System;
using System.Collections.Generic;
using System.Data.SQLite;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using FileManager;
namespace CookieMonster
{
internal class Chrome : IBrowser
{
public async Task<IEnumerable<CookieValue>> GetAllCookiesAsync()
{
var col = new List<CookieValue>();
var strPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), @"Google\Chrome\User Data\Default\Cookies");
if (!FileUtility.FileExists(strPath))
return col;
//
// IF WE GET AN ERROR HERE
// then add a reference to sqlite core in the project which is ultimately calling this.
// a project which directly references CookieMonster doesn't need to also ref sqlite.
// however, for any further number of abstractions, the project needs to directly ref sqlite.
// eg: this will not work unless the winforms proj adds sqlite to ref.s:
// LibationWinForm > AudibleDotComAutomation > CookieMonster
//
using var conn = new SQLiteConnection("Data Source=" + strPath + ";pooling=false");
using var cmd = conn.CreateCommand();
cmd.CommandText = "SELECT host_key, name, value, encrypted_value, last_access_utc, expires_utc FROM cookies;";
conn.Open();
using var reader = await cmd.ExecuteReaderAsync().ConfigureAwait(false);
while (reader.Read())
{
var host_key = reader.GetString(0);
var name = reader.GetString(1);
var value = reader.GetString(2);
var last_access_utc = reader.GetInt64(4);
var expires_utc = reader.GetInt64(5);
// https://stackoverflow.com/a/25874366
if (string.IsNullOrWhiteSpace(value))
{
var encrypted_value = (byte[])reader[3];
var decodedData = System.Security.Cryptography.ProtectedData.Unprotect(encrypted_value, null, System.Security.Cryptography.DataProtectionScope.CurrentUser);
value = Encoding.ASCII.GetString(decodedData);
}
try
{
// if something goes wrong in this step (eg: a cookie has an invalid filetime), then just skip this cookie
col.Add(new CookieValue { Browser = "chrome", Domain = host_key, Name = name, Value = value, LastAccess = chromeTimeToDateTimeUtc(last_access_utc), Expires = chromeTimeToDateTimeUtc(expires_utc) });
}
catch { }
}
return col;
}
// Chrome uses 1601-01-01 00:00:00 UTC as the epoch (ie the starting point for the microsecond time counter).
// this is the same epoch as Win32 "FILETIME", except FILETIME uses 100ns ticks instead of microseconds.
private static DateTime chromeTimeToDateTimeUtc(long time) => DateTime.FromFileTimeUtc(time * 10);
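// worked example of the unit conversion (illustrative value, not taken from a real cookie):
//   13,200,000,000,000,000 µs since 1601-01-01 UTC
//   * 10 => 132,000,000,000,000,000 FILETIME ticks
//   => roughly April 2019 UTC (13.2e15 µs ≈ 418.3 years)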
}
}

View File

@ -0,0 +1,61 @@
using System;
using System.Collections.Generic;
using System.Data.SQLite;
using System.IO;
using System.Threading.Tasks;
using FileManager;
namespace CookieMonster
{
internal class FireFox : IBrowser
{
public async Task<IEnumerable<CookieValue>> GetAllCookiesAsync()
{
var col = new List<CookieValue>();
string strPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), @"Mozilla\Firefox\Profiles");
if (!FileUtility.FileExists(strPath))
return col;
var dirs = new DirectoryInfo(strPath).GetDirectories("*.default");
if (dirs.Length != 1)
return col;
strPath = Path.Combine(strPath, dirs[0].Name, "cookies.sqlite");
if (!FileUtility.FileExists(strPath))
return col;
// First copy the cookie jar so that we can read the cookies from unlocked copy while FireFox is running
var strTemp = strPath + ".temp";
File.Copy(strPath, strTemp, true);
// Now open the temporary cookie jar and extract Value from the cookie if we find it.
using var conn = new SQLiteConnection("Data Source=" + strTemp + ";pooling=false");
using var cmd = conn.CreateCommand();
cmd.CommandText = "SELECT host, name, value, lastAccessed, expiry FROM moz_cookies; ";
conn.Open();
using var reader = await cmd.ExecuteReaderAsync().ConfigureAwait(false);
while (reader.Read())
{
var host_key = reader.GetString(0);
var name = reader.GetString(1);
var value = reader.GetString(2);
var lastAccessed = reader.GetInt64(3);
var expiry = reader.GetInt64(4);
col.Add(new CookieValue { Browser = "firefox", Domain = host_key, Name = name, Value = value, LastAccess = lastAccessedToDateTime(lastAccessed), Expires = expiryToDateTime(expiry) });
}
if (FileUtility.FileExists(strTemp))
File.Delete(strTemp);
return col;
}
// lastAccessed is in microseconds since the unix epoch (1 µs == 10 ticks)
private static DateTime lastAccessedToDateTime(long time) => new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddTicks(time * 10);
// expiry is in whole seconds since the unix epoch
private static DateTime expiryToDateTime(long time) => new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddSeconds(time);
}
}

View File

@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace CookieMonster
{
internal interface IBrowser
{
Task<IEnumerable<CookieValue>> GetAllCookiesAsync();
}
}

View File

@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CookieMonster
{
internal class InternetExplorer : IBrowser
{
public async Task<IEnumerable<CookieValue>> GetAllCookiesAsync()
{
// real locations of Windows Cookies folders
//
// Windows 7:
// C:\Users\username\AppData\Roaming\Microsoft\Windows\Cookies
// C:\Users\username\AppData\Roaming\Microsoft\Windows\Cookies\Low
//
// Windows 8, Windows 8.1, Windows 10:
// C:\Users\username\AppData\Local\Microsoft\Windows\INetCookies
// C:\Users\username\AppData\Local\Microsoft\Windows\INetCookies\Low
var strPath = Environment.GetFolderPath(Environment.SpecialFolder.Cookies);
var col = (await getIECookiesAsync(strPath).ConfigureAwait(false)).ToList();
col = col.Concat(await getIECookiesAsync(Path.Combine(strPath, "Low"))).ToList();
return col;
}
private static async Task<IEnumerable<CookieValue>> getIECookiesAsync(string strPath)
{
var cookies = new List<CookieValue>();
var files = await Task.Run(() => Directory.EnumerateFiles(strPath, "*.txt"));
foreach (string path in files)
{
var cookiesInFile = new List<CookieValue>();
var cookieLines = File.ReadAllLines(path);
CookieValue currCookieVal = null;
// the lo halves of the FILETIME values must survive across loop iterations so they can be combined with the corresponding hi halves read on the following lines
long expLoTemp = 0;
long creatLoTemp = 0;
for (var i = 0; i < cookieLines.Length; i++)
{
var line = cookieLines[i];
// IE cookie format
// 0 Cookie name
// 1 Cookie value
// 2 Host / path for the web server setting the cookie
// 3 Flags
// 4 Expiration time (low int)
// 5 Expiration time (high int)
// 6 Creation time (low int)
// 7 Creation time (high int)
// 8 Record delimiter == "*"
var pos = i % 9;
if (pos == 0)
{
currCookieVal = new CookieValue { Browser = "ie", Name = line };
cookiesInFile.Add(currCookieVal);
}
else if (pos == 1)
currCookieVal.Value = line;
else if (pos == 2)
currCookieVal.Domain = line;
else if (pos == 4)
expLoTemp = Int64.Parse(line);
else if (pos == 5)
currCookieVal.Expires = LoHiToDateTime(expLoTemp, Int64.Parse(line));
else if (pos == 6)
creatLoTemp = Int64.Parse(line);
else if (pos == 7)
currCookieVal.LastAccess = LoHiToDateTime(creatLoTemp, Int64.Parse(line));
}
cookies.AddRange(cookiesInFile);
}
return cookies;
}
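// each IE cookie record stores its FILETIME timestamps split into two unsigned 32-bit decimal strings (low word first).
// recombining as (hi << 32) + lo yields the 64-bit count of 100-nanosecond ticks since 1601-01-01 UTC, which FromFileTimeUtc expects.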
private static DateTime LoHiToDateTime(long lo, long hi) => DateTime.FromFileTimeUtc(((hi << 32) + lo));
}
}

View File

@ -0,0 +1,32 @@
using System;
namespace CookieMonster
{
public class CookieValue
{
public string Browser { get; set; }
public string Name { get; set; }
public string Value { get; set; }
public string Domain { get; set; }
public DateTime LastAccess { get; set; }
public DateTime Expires { get; set; }
public bool IsValid
{
get
{
// sanity check. datetimes are stored weird in each cookie type. make sure i haven't converted these incredibly wrong.
// some early conversion attempts produced years like 42, 1955, 4033
var _5yearsPast = DateTime.UtcNow.AddYears(-5);
if (LastAccess < _5yearsPast || LastAccess > DateTime.UtcNow)
return false;
// don't check expiry. some sites are setting stupid values for year. eg: 9999
return true;
}
}
public bool HasExpired => Expires < DateTime.UtcNow;
}
}

View File

@ -0,0 +1,57 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Dinah.Core.Collections.Generic;
namespace CookieMonster
{
public static class CookiesHelper
{
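/// <summary>scan loaded assemblies for concrete IBrowser implementations (currently Chrome, FireFox, InternetExplorer) and create one instance of each via its parameterless ctor. new browsers are picked up automatically once they implement IBrowser</summary>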
internal static IEnumerable<IBrowser> GetBrowsers()
=> AppDomain.CurrentDomain
.GetAssemblies()
.SelectMany(s => s.GetTypes())
.Where(p => typeof(IBrowser).IsAssignableFrom(p) && !p.IsAbstract && !p.IsInterface)
.Select(t => Activator.CreateInstance(t) as IBrowser)
.ToList();
/// <summary>all. including expired</summary>
public static async Task<IEnumerable<CookieValue>> GetAllCookieValuesAsync()
{
//// foreach{await} runs in serial
//var allCookies = new List<CookieValue>();
//foreach (var b in GetBrowsers())
//{
// var browserCookies = await b.GetAllCookiesAsync().ConfigureAwait(false);
// allCookies.AddRange(browserCookies);
//}
//// WhenAll runs in parallel
// this 1st step LOOKS like a bug that runs each method serially to completion. however, since we don't await here, each call just returns a Task; the resulting tasks are then awaited in parallel by Task.WhenAll below
var browserTasks = GetBrowsers().Select(b => b.GetAllCookiesAsync());
var results = await Task.WhenAll(browserTasks).ConfigureAwait(false);
var allCookies = results.SelectMany(a => a).ToList();
if (allCookies.Any(c => !c.IsValid))
throw new Exception("some date time was converted way too far");
foreach (var c in allCookies)
c.Domain = c.Domain.TrimEnd('/');
// for each domain+name, only keep the 1 with the most recent access
var sortedCookies = allCookies
.OrderByDescending(c => c.LastAccess)
.DistinctBy(c => new { c.Domain, c.Name })
.ToList();
return sortedCookies;
}
/// <summary>not expired</summary>
public static async Task<IEnumerable<CookieValue>> GetLiveCookieValuesAsync()
=> (await GetAllCookieValuesAsync().ConfigureAwait(false))
.Where(c => !c.HasExpired)
.ToList();
}
}

View File

@ -1,38 +1,36 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<TargetFrameworks>netcoreapp3.0;netstandard2.1</TargetFrameworks>
</PropertyGroup>
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
<ApplicationIcon />
<OutputType>Library</OutputType>
<StartupObject />
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Dinah.Core" Version="9.0.3.1" />
<PackageReference Include="Dinah.EntityFrameworkCore" Version="9.0.0.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.8">
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="3.0.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="9.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="9.0.8">
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="3.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="3.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="3.0.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DebugType>embedded</DebugType>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<DebugType>embedded</DebugType>
</PropertyGroup>
<ItemGroup>
<None Update="migrate.json">
<ProjectReference Include="..\..\Dinah.Core\Dinah.EntityFrameworkCore\Dinah.EntityFrameworkCore.csproj" />
<ProjectReference Include="..\FileManager\FileManager.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="appsettings.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

View File

@ -3,53 +3,57 @@ using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20200812152646_AddLocaleAndAccount")]
partial class AddLocaleAndAccount
[Migration("20191007202808_UpgradeToCore3")]
partial class UpgradeToCore3
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "3.1.7");
.HasAnnotation("ProductVersion", "3.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 128)
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
.HasColumnType("datetime2");
b.Property<string>("Description")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.Property<bool>("HasBookDetails")
.HasColumnType("bit");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
.HasColumnType("bit");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
.HasColumnType("int");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.Property<string>("Title")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.HasKey("BookId");
@ -63,16 +67,16 @@ namespace DataLayer.Migrations
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<int>("Role")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
.HasColumnType("tinyint");
b.HasKey("BookId", "ContributorId", "Role");
@ -87,16 +91,17 @@ namespace DataLayer.Migrations
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.Property<string>("Name")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.HasKey("CategoryId");
@ -119,38 +124,32 @@ namespace DataLayer.Migrations
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("AudibleAuthorId")
.HasColumnType("nvarchar(max)");
b.Property<string>("Name")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
.HasColumnType("int");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
.HasColumnType("datetime2");
b.Property<string>("DownloadBookLink")
.HasColumnType("nvarchar(max)");
b.HasKey("BookId");
@ -161,13 +160,14 @@ namespace DataLayer.Migrations
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.Property<string>("Name")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.HasKey("SeriesId");
@ -179,13 +179,13 @@ namespace DataLayer.Migrations
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<float?>("Index")
.HasColumnType("REAL");
.HasColumnType("real");
b.HasKey("SeriesId", "BookId");
@ -207,16 +207,18 @@ namespace DataLayer.Migrations
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
.ValueGeneratedOnAdd()
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
.HasColumnType("real");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
.HasColumnType("real");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
.HasColumnType("real");
b1.HasKey("BookId");
@ -230,13 +232,14 @@ namespace DataLayer.Migrations
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b1.Property<string>("Url")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b1.HasKey("SupplementId");
@ -251,10 +254,10 @@ namespace DataLayer.Migrations
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b1.HasKey("BookId");
@ -266,16 +269,16 @@ namespace DataLayer.Migrations
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
.HasColumnType("real");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
.HasColumnType("real");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
.HasColumnType("real");
b2.HasKey("UserDefinedItemBookId");

View File

@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class Fresh : Migration
public partial class UpgradeToCore3 : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
@ -12,7 +12,7 @@ namespace DataLayer.Migrations
columns: table => new
{
CategoryId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
.Annotation("SqlServer:Identity", "1, 1"),
AudibleCategoryId = table.Column<string>(nullable: true),
Name = table.Column<string>(nullable: true),
ParentCategoryCategoryId = table.Column<int>(nullable: true)
@ -33,9 +33,9 @@ namespace DataLayer.Migrations
columns: table => new
{
ContributorId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
.Annotation("SqlServer:Identity", "1, 1"),
Name = table.Column<string>(nullable: true),
AudibleContributorId = table.Column<string>(nullable: true)
AudibleAuthorId = table.Column<string>(nullable: true)
},
constraints: table =>
{
@ -47,7 +47,7 @@ namespace DataLayer.Migrations
columns: table => new
{
SeriesId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
.Annotation("SqlServer:Identity", "1, 1"),
AudibleSeriesId = table.Column<string>(nullable: true),
Name = table.Column<string>(nullable: true)
},
@ -61,12 +61,13 @@ namespace DataLayer.Migrations
columns: table => new
{
BookId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
.Annotation("SqlServer:Identity", "1, 1"),
AudibleProductId = table.Column<string>(nullable: true),
Title = table.Column<string>(nullable: true),
Description = table.Column<string>(nullable: true),
LengthInMinutes = table.Column<int>(nullable: false),
PictureId = table.Column<string>(nullable: true),
HasBookDetails = table.Column<bool>(nullable: false),
IsAbridged = table.Column<bool>(nullable: false),
DatePublished = table.Column<DateTime>(nullable: true),
CategoryId = table.Column<int>(nullable: false),
@ -116,7 +117,8 @@ namespace DataLayer.Migrations
columns: table => new
{
BookId = table.Column<int>(nullable: false),
DateAdded = table.Column<DateTime>(nullable: false)
DateAdded = table.Column<DateTime>(nullable: false),
DownloadBookLink = table.Column<string>(nullable: true)
},
constraints: table =>
{
@ -159,7 +161,7 @@ namespace DataLayer.Migrations
columns: table => new
{
SupplementId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
.Annotation("SqlServer:Identity", "1, 1"),
BookId = table.Column<int>(nullable: false),
Url = table.Column<string>(nullable: true)
},
@ -200,11 +202,6 @@ namespace DataLayer.Migrations
columns: new[] { "CategoryId", "AudibleCategoryId", "Name", "ParentCategoryCategoryId" },
values: new object[] { -1, "", "", null });
migrationBuilder.InsertData(
table: "Contributors",
columns: new[] { "ContributorId", "AudibleContributorId", "Name" },
values: new object[] { -1, null, "" });
migrationBuilder.CreateIndex(
name: "IX_BookContributor_BookId",
table: "BookContributor",

View File

@ -3,50 +3,55 @@ using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20191125182309_Fresh")]
partial class Fresh
partial class LibationContextModelSnapshot : ModelSnapshot
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "3.0.0");
.HasAnnotation("ProductVersion", "3.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 128)
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
.HasColumnType("datetime2");
b.Property<string>("Description")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.Property<bool>("HasBookDetails")
.HasColumnType("bit");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
.HasColumnType("bit");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.Property<string>("Title")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.HasKey("BookId");
@ -60,16 +65,16 @@ namespace DataLayer.Migrations
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<int>("Role")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
.HasColumnType("tinyint");
b.HasKey("BookId", "ContributorId", "Role");
@ -84,16 +89,17 @@ namespace DataLayer.Migrations
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.Property<string>("Name")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.HasKey("CategoryId");
@ -116,35 +122,32 @@ namespace DataLayer.Migrations
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("AudibleAuthorId")
.HasColumnType("nvarchar(max)");
b.Property<string>("Name")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
.HasColumnType("datetime2");
b.Property<string>("DownloadBookLink")
.HasColumnType("nvarchar(max)");
b.HasKey("BookId");
@ -155,13 +158,14 @@ namespace DataLayer.Migrations
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(450)");
b.Property<string>("Name")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b.HasKey("SeriesId");
@ -173,13 +177,13 @@ namespace DataLayer.Migrations
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b.Property<float?>("Index")
.HasColumnType("REAL");
.HasColumnType("real");
b.HasKey("SeriesId", "BookId");
@ -201,16 +205,18 @@ namespace DataLayer.Migrations
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
.ValueGeneratedOnAdd()
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
.HasColumnType("real");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
.HasColumnType("real");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
.HasColumnType("real");
b1.HasKey("BookId");
@ -224,13 +230,14 @@ namespace DataLayer.Migrations
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
.HasColumnType("int")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b1.Property<string>("Url")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b1.HasKey("SupplementId");
@ -245,10 +252,10 @@ namespace DataLayer.Migrations
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
.HasColumnType("nvarchar(max)");
b1.HasKey("BookId");
@ -260,16 +267,16 @@ namespace DataLayer.Migrations
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
.HasColumnType("int");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
.HasColumnType("real");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
.HasColumnType("real");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
.HasColumnType("real");
b2.HasKey("UserDefinedItemBookId");

View File

@ -0,0 +1,19 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public static class RemoveOrphansCommand
{
public static int RemoveOrphans(this LibationContext context)
=> context.Database.ExecuteSqlRaw(@"
delete c
from Contributors c
left join BookContributor bc on c.ContributorId = bc.ContributorId
left join Books b on bc.BookId = b.BookId
where bc.ContributorId is null
");
}
}

View File

@ -0,0 +1,71 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class BookConfig : IEntityTypeConfiguration<Book>
{
public void Configure(EntityTypeBuilder<Book> entity)
{
entity.HasKey(b => b.BookId);
entity.HasIndex(b => b.AudibleProductId);
entity.OwnsOne(b => b.Rating);
//
// CRUCIAL: ignore unmapped collections, even get-only
//
entity.Ignore(nameof(Book.Authors));
entity.Ignore(nameof(Book.Narrators));
//// these don't seem to matter
//entity.Ignore(nameof(Book.AuthorNames));
//entity.Ignore(nameof(Book.NarratorNames));
//entity.Ignore(nameof(Book.HasPdfs));
// OwnsMany: "Can only ever appear on navigation properties of other entity types.
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner."
entity
.OwnsMany(b => b.Supplements, b_s =>
{
b_s.WithOwner(s => s.Book)
.HasForeignKey(s => s.BookId);
b_s.HasKey(s => s.SupplementId);
});
// even though it's owned, we need to map its backing field
entity
.Metadata
.FindNavigation(nameof(Book.Supplements))
.SetPropertyAccessMode(PropertyAccessMode.Field);
// owns it 1:1, store in separate table
entity
.OwnsOne(b => b.UserDefinedItem, b_udi =>
{
b_udi.WithOwner(udi => udi.Book)
.HasForeignKey(udi => udi.BookId);
b_udi.Property(udi => udi.BookId).ValueGeneratedNever();
b_udi.ToTable(nameof(Book.UserDefinedItem));
// owns it 1:1, store in same table
b_udi.OwnsOne(udi => udi.Rating);
});
entity
.Metadata
.FindNavigation(nameof(Book.ContributorsLink))
// PropertyAccessMode.Field : Contributions is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.Metadata
.FindNavigation(nameof(Book.SeriesLink))
// PropertyAccessMode.Field : Series is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.HasOne(b => b.Category)
.WithMany()
.HasForeignKey(b => b.CategoryId);
}
}
}

View File

@ -9,8 +9,8 @@ namespace DataLayer.Configurations
{
entity.HasKey(bc => new { bc.BookId, bc.ContributorId, bc.Role });
entity.HasIndex(bc => bc.BookId);
entity.HasIndex(bc => bc.ContributorId);
entity.HasIndex(b => b.BookId);
entity.HasIndex(b => b.ContributorId);
entity
.HasOne(bc => bc.Book)

View File

@ -9,12 +9,6 @@ namespace DataLayer.Configurations
{
entity.HasKey(c => c.CategoryId);
entity.HasIndex(c => c.AudibleCategoryId);
entity.Ignore(c => c.CategoryLadders);
entity
.HasMany(e => e._categoryLadders)
.WithMany(e => e._categories);
}
}
}

View File

@ -17,9 +17,6 @@ namespace DataLayer.Configurations
.Metadata
.FindNavigation(nameof(Contributor.BooksLink))
.SetPropertyAccessMode(PropertyAccessMode.Field);
// seeds go here. examples in Dinah.EntityFrameworkCore.Tests\DbContextFactoryExample.cs
entity.HasData(Contributor.GetEmpty());
}
}
}

View File

@ -0,0 +1,18 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class LibraryBookConfig : IEntityTypeConfiguration<LibraryBook>
{
public void Configure(EntityTypeBuilder<LibraryBook> entity)
{
entity.HasKey(b => b.BookId);
entity
.HasOne(le => le.Book)
.WithOne()
.HasForeignKey<LibraryBook>(le => le.BookId);
}
}
}

View File

@ -7,10 +7,10 @@ namespace DataLayer.Configurations
{
public void Configure(EntityTypeBuilder<SeriesBook> entity)
{
entity.HasKey(sb => new { sb.SeriesId, sb.BookId });
entity.HasKey(bc => new { bc.SeriesId, bc.BookId });
entity.HasIndex(sb => sb.SeriesId);
entity.HasIndex(sb => sb.BookId);
entity.HasIndex(b => b.SeriesId);
entity.HasIndex(b => b.BookId);
entity
.HasOne(sb => sb.Series)

View File

@ -7,8 +7,8 @@ namespace DataLayer.Configurations
{
public void Configure(EntityTypeBuilder<Series> entity)
{
entity.HasKey(s => s.SeriesId);
entity.HasIndex(s => s.AudibleSeriesId);
entity.HasKey(b => b.SeriesId);
entity.HasIndex(b => b.AudibleSeriesId);
entity
.Metadata

View File

@ -0,0 +1,254 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class AudibleProductId
{
public string Id { get; }
public AudibleProductId(string id)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
Id = id;
}
}
public class Book
{
// implementation detail. set by db only. only used by data layer
internal int BookId { get; private set; }
// immutable
public string AudibleProductId { get; private set; }
public string Title { get; private set; }
public string Description { get; private set; }
public int LengthInMinutes { get; private set; }
// mutable
public string PictureId { get; set; }
// book details
public bool HasBookDetails { get; private set; }
public bool IsAbridged { get; private set; }
public DateTime? DatePublished { get; private set; }
// non-null. use "empty pattern"
internal int CategoryId { get; private set; }
public Category Category { get; private set; }
public string[] CategoriesNames
=> Category == null ? new string[0]
: Category.ParentCategory == null ? new[] { Category.Name }
: new[] { Category.ParentCategory.Name, Category.Name };
public string[] CategoriesIds
=> Category == null ? null
: Category.ParentCategory == null ? new[] { Category.AudibleCategoryId }
: new[] { Category.ParentCategory.AudibleCategoryId, Category.AudibleCategoryId };
// is owned, not optional 1:1
public UserDefinedItem UserDefinedItem { get; private set; }
// is owned, not optional 1:1
/// <summary>The product's aggregate community rating</summary>
public Rating Rating { get; private set; } = new Rating(0, 0, 0);
// ef-ctor
private Book() { }
// non-ef ctor
/// <param name="audibleProductId">special id class b/c it's too easy to get string order mixed up</param>
public Book(
AudibleProductId audibleProductId,
string title,
string description,
int lengthInMinutes,
IEnumerable<Contributor> authors)
{
// validate
ArgumentValidator.EnsureNotNull(audibleProductId, nameof(audibleProductId));
var productId = audibleProductId.Id;
ArgumentValidator.EnsureNotNullOrWhiteSpace(productId, nameof(productId));
ArgumentValidator.EnsureNotNullOrWhiteSpace(title, nameof(title));
// non-ef-ctor init.s
UserDefinedItem = new UserDefinedItem(this);
_contributorsLink = new HashSet<BookContributor>();
_seriesLink = new HashSet<SeriesBook>();
_supplements = new HashSet<Supplement>();
// since category/id is never null, nullity means it hasn't been loaded
CategoryId = Category.GetEmpty().CategoryId;
// simple assigns
AudibleProductId = productId;
Title = title;
Description = description;
LengthInMinutes = lengthInMinutes;
// assigns with biz logic
ReplaceAuthors(authors);
//ReplaceNarrators(narrators);
// import previously saved tags
// do this immediately. any save that occurs before tags are reloaded will overwrite persisted tags with new blank entries; all old persisted tags would be lost
// if refactoring, DO NOT use "ProductId" before it's assigned to. to be safe, just use "productId"
UserDefinedItem = new UserDefinedItem(this) { Tags = FileManager.TagsPersistence.GetTags(productId) };
}
#region contributors, authors, narrators
// use uninitialised backing fields - this means we can detect if the collection was loaded
private HashSet<BookContributor> _contributorsLink;
// i'd like this to be internal but migration throws this exception when i try:
// Value cannot be null.
// Parameter name: property
public IEnumerable<BookContributor> ContributorsLink
=> _contributorsLink?
.OrderBy(bc => bc.Order)
.ToList();
public IEnumerable<Contributor> Authors => getContributions(Role.Author).Select(bc => bc.Contributor).ToList();
public string AuthorNames => string.Join(", ", Authors.Select(a => a.Name));
public IEnumerable<Contributor> Narrators => getContributions(Role.Narrator).Select(bc => bc.Contributor).ToList();
public string NarratorNames => string.Join(", ", Narrators.Select(n => n.Name));
public string Publisher => getContributions(Role.Publisher).SingleOrDefault()?.Contributor.Name;
public void ReplaceAuthors(IEnumerable<Contributor> authors, DbContext context = null)
=> replaceContributors(authors, Role.Author, context);
public void ReplaceNarrators(IEnumerable<Contributor> narrators, DbContext context = null)
=> replaceContributors(narrators, Role.Narrator, context);
public void ReplacePublisher(Contributor publisher, DbContext context = null)
=> replaceContributors(new List<Contributor> { publisher }, Role.Publisher, context);
private void replaceContributors(IEnumerable<Contributor> newContributors, Role role, DbContext context = null)
{
ArgumentValidator.EnsureEnumerableNotNullOrEmpty(newContributors, nameof(newContributors));
// the edge cases of doing local-loaded vs remote-only got weird. just load it
if (_contributorsLink == null)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
if (!context.Entry(this).IsKeySet)
throw new InvalidOperationException("Could not add contributors");
context.Entry(this).Collection(s => s.ContributorsLink).Load();
}
var roleContributions = getContributions(role);
var isIdentical = roleContributions.Select(c => c.Contributor).SequenceEqual(newContributors);
if (isIdentical)
return;
_contributorsLink.RemoveWhere(bc => bc.Role == role);
addNewContributors(newContributors, role);
}
private void addNewContributors(IEnumerable<Contributor> newContributors, Role role)
{
byte order = 0;
var newContributionsEnum = newContributors.Select(c => new BookContributor(this, c, role, order++));
var newContributions = new HashSet<BookContributor>(newContributionsEnum);
_contributorsLink.UnionWith(newContributions);
}
private List<BookContributor> getContributions(Role role)
=> ContributorsLink
.Where(a => a.Role == role)
.OrderBy(a => a.Order)
.ToList();
#endregion
#region series
private HashSet<SeriesBook> _seriesLink;
public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList();
public string SeriesNames
{
get
{
// first: alphabetical by name
var withNames = _seriesLink
.Where(s => !string.IsNullOrWhiteSpace(s.Series.Name))
.Select(s => s.Series.Name)
.OrderBy(a => a)
.ToList();
// then un-named are alpha by series id
var nullNames = _seriesLink
.Where(s => string.IsNullOrWhiteSpace(s.Series.Name))
.Select(s => s.Series.AudibleSeriesId)
.OrderBy(a => a)
.ToList();
var all = withNames.Union(nullNames).ToList();
return string.Join(", ", all);
}
}
public void UpsertSeries(Series series, float? index = null, DbContext context = null)
{
ArgumentValidator.EnsureNotNull(series, nameof(series));
// our add() is conditional upon what's already included in the collection.
// therefore if not loaded, a trip is required. might as well just load it
if (_seriesLink == null)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
if (!context.Entry(this).IsKeySet)
throw new InvalidOperationException("Could not add series");
context.Entry(this).Collection(s => s.SeriesLink).Load();
}
var singleSeriesBook = _seriesLink.SingleOrDefault(sb => sb.Series == series);
if (singleSeriesBook == null)
_seriesLink.Add(new SeriesBook(series, this, index));
else
singleSeriesBook.UpdateIndex(index);
}
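// usage sketch (illustrative values): when the book was materialized without its SeriesLink collection,
// pass the tracking DbContext so the collection can be loaded before the upsert:
//   book.UpsertSeries(series, index: 1, context: dbContext);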
#endregion
#region supplements
private HashSet<Supplement> _supplements;
public IEnumerable<Supplement> Supplements => _supplements?.ToList();
public bool HasPdfs => Supplements.Any();
public void AddSupplementDownloadUrl(string url)
{
// supplements are owned by Book, so no need to Load():
// OwnsMany: "Can only ever appear on navigation properties of other entity types.
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner."
ArgumentValidator.EnsureNotNullOrWhiteSpace(url, nameof(url));
if (!_supplements.Any(s => s.Url.EqualsInsensitive(url)))
_supplements.Add(new Supplement(this, url));
}
#endregion
public void UpdateProductRating(float overallRating, float performanceRating, float storyRating)
=> Rating.Update(overallRating, performanceRating, storyRating);
public void UpdateBookDetails(bool isAbridged, DateTime? datePublished)
{
// don't overwrite with default values
IsAbridged |= isAbridged;
DatePublished = datePublished ?? DatePublished;
HasBookDetails = true;
}
public void UpdateCategory(Category category, DbContext context = null)
{
// since category is never null, nullity means it hasn't been loaded
if (Category != null || CategoryId == Category.GetEmpty().CategoryId)
{
Category = category;
return;
}
if (context == null)
throw new Exception("need context");
context.Entry(this).Reference(s => s.Category).Load();
Category = category;
}
}
}

View File

@ -2,8 +2,6 @@
namespace DataLayer
{
public enum Role { Author = 1, Narrator = 2, Publisher = 3 }
public class BookContributor
{
internal int BookId { get; private set; }
@ -25,7 +23,5 @@ namespace DataLayer
Role = role;
Order = order;
}
public override string ToString() => $"{Book} {Contributor} {Role} {Order}";
}
}
}

View File

@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class AudibleCategoryId
{
public string Id { get; }
public AudibleCategoryId(string id)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
Id = id;
}
}
public class Category
{
// Empty is a special case. use private ctor w/o validation
public static Category GetEmpty() => new Category { CategoryId = -1, AudibleCategoryId = "", Name = "", ParentCategory = null };
public bool IsEmpty() => string.IsNullOrWhiteSpace(AudibleCategoryId) || string.IsNullOrWhiteSpace(Name) || ParentCategory == null;
internal int CategoryId { get; private set; }
public string AudibleCategoryId { get; private set; }
public string Name { get; private set; }
public Category ParentCategory { get; private set; }
private Category() { }
/// <summary>special id class b/c it's too easy to get string order mixed up</summary>
public Category(AudibleCategoryId audibleCategoryId, string name, Category parentCategory = null)
{
ArgumentValidator.EnsureNotNull(audibleCategoryId, nameof(audibleCategoryId));
var id = audibleCategoryId.Id;
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
ArgumentValidator.EnsureNotNullOrWhiteSpace(name, nameof(name));
AudibleCategoryId = id;
Name = name;
UpdateParentCategory(parentCategory);
}
public void UpdateParentCategory(Category parentCategory)
{
// don't overwrite with null but not an error
if (parentCategory != null)
ParentCategory = parentCategory;
}
}
}
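The AudibleCategoryId wrapper keeps the id and name arguments from being silently swapped at call sites; a short sketch (the id strings are illustrative, not real Audible category ids):
// Compiles: the id and the display name are distinct types, so argument order is enforced.
var fiction = new Category(new AudibleCategoryId("cat-0001"), "Fiction");
var thriller = new Category(new AudibleCategoryId("cat-0002"), "Thriller", parentCategory: fiction);
// Does not compile: a bare string cannot be passed where AudibleCategoryId is expected.
// var wrong = new Category("Fiction", "cat-0001");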

View File

@ -0,0 +1,82 @@
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
namespace DataLayer
{
public class Contributor
{
// contributors search links are just name with url-encoding. space can be + or %20
// author search link: /search?searchAuthor=Robert+Bevan
// narrator search link: /search?searchNarrator=Robert+Bevan
// can also search multiples. concat with comma before url encode
// id.s
// ----
// https://www.audible.com/author/Neil-Gaiman/B000AQ01G2 == https://www.audible.com/author/B000AQ01G2
// goes to summary page
// at bottom "See all titles by Neil Gaiman" goes to https://www.audible.com/search?searchAuthor=Neil+Gaiman
// some authors have no id. simply goes to https://www.audible.com/search?searchAuthor=Rufus+Fears
// all narrators have no id: https://www.audible.com/search?searchNarrator=Neil+Gaiman
internal int ContributorId { get; private set; }
public string Name { get; private set; }
private HashSet<BookContributor> _booksLink;
public IEnumerable<BookContributor> BooksLink => _booksLink?.ToList();
private Contributor() { }
public Contributor(string name)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(name, nameof(name));
_booksLink = new HashSet<BookContributor>();
Name = name;
}
public string AudibleAuthorId { get; private set; }
public void UpdateAudibleAuthorId(string authorId)
{
// don't overwrite with null or whitespace but not an error
if (!string.IsNullOrWhiteSpace(authorId))
AudibleAuthorId = authorId;
}
#region // AudibleAuthorId refactor: separate author-specific info. overkill for a single optional string
///// <summary>Most authors in Audible have a unique id</summary>
//public AudibleAuthorProperty AudibleAuthorProperty { get; private set; }
//public void UpdateAuthorId(string authorId, LibationContext context = null)
//{
// if (authorId == null)
// return;
// if (AudibleAuthorProperty != null)
// {
// AudibleAuthorProperty.UpdateAudibleAuthorId(authorId);
// return;
// }
// if (context == null)
// throw new ArgumentNullException(nameof(context), "You must provide a context");
// if (context.Contributors.Find(ContributorId) == null)
// throw new InvalidOperationException("Could not update audible author id.");
// var audibleAuthorProperty = new AudibleAuthorProperty();
// audibleAuthorProperty.UpdateAudibleAuthorId(authorId);
// context.AuthorProperties.Add(audibleAuthorProperty);
//}
//public class AudibleAuthorProperty
//{
// public int ContributorId { get; private set; }
// public Contributor Contributor { get; set; }
// public string AudibleAuthorId { get; private set; }
// public void UpdateAudibleAuthorId(string authorId)
// {
// if (!string.IsNullOrWhiteSpace(authorId))
// AudibleAuthorId = authorId;
// }
//}
//// ...and create EF table config
#endregion
}
}
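The search-link notes at the top of Contributor translate directly into a URL builder; this helper is not part of Libation and is shown only to make the comment concrete:
using System;
static string AuthorSearchUrl(params string[] names)
    // multiple names are concatenated with a comma before URL encoding;
    // spaces may be encoded as either '+' or '%20'
    => "https://www.audible.com/search?searchAuthor="
        + Uri.EscapeDataString(string.Join(",", names));
// AuthorSearchUrl("Robert Bevan") -> https://www.audible.com/search?searchAuthor=Robert%20Bevan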

View File

@ -0,0 +1,27 @@
using System;
using Dinah.Core;
namespace DataLayer
{
public class LibraryBook
{
internal int BookId { get; private set; }
public Book Book { get; private set; }
public DateTime DateAdded { get; private set; }
/// <summary>For downloading AAX file</summary>
public string DownloadBookLink { get; private set; }
private LibraryBook() { }
public LibraryBook(Book book, DateTime dateAdded
, string downloadBookLink = null
)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
Book = book;
DateAdded = dateAdded;
DownloadBookLink = downloadBookLink;
}
}
}

View File

@ -5,14 +5,14 @@ using Dinah.Core;
namespace DataLayer
{
/// <summary>Parameterless ctor and setters should be used by EF only. Everything else should treat it as immutable</summary>
public class Rating : ValueObject_Static<Rating>, IComparable<Rating>, IComparable
public class Rating : ValueObject_Static<Rating>
{
public float OverallRating { get; private set; }
public float PerformanceRating { get; private set; }
public float StoryRating { get; private set; }
private Rating() { }
public Rating(float overallRating, float performanceRating, float storyRating)
internal Rating(float overallRating, float performanceRating, float storyRating)
{
OverallRating = overallRating;
PerformanceRating = performanceRating;
@ -38,16 +38,39 @@ namespace DataLayer
yield return StoryRating;
}
public override string ToString() => $"Overall={OverallRating} Perf={PerformanceRating} Story={StoryRating}";
public float FirstScore
=> OverallRating > 0 ? OverallRating
: PerformanceRating > 0 ? PerformanceRating
: StoryRating;
public int CompareTo(Rating other)
/// <summary>character: ★</summary>
const char STAR = '\u2605';
/// <summary>character: ½</summary>
const char HALF = '\u00BD';
string getStars(float score)
{
var compare = OverallRating.CompareTo(other.OverallRating);
if (compare != 0) return compare;
compare = PerformanceRating.CompareTo(other.PerformanceRating);
if (compare != 0) return compare;
return StoryRating.CompareTo(other.StoryRating);
var fullStars = (int)Math.Floor(score);
var starString = "".PadLeft(fullStars, STAR);
if (score - fullStars == 0.5f)
starString += HALF;
return starString;
}
public string ToStarString()
{
var items = new List<string>();
if (OverallRating > 0)
items.Add($"Overall: {getStars(OverallRating)}");
if (PerformanceRating > 0)
items.Add($"Perform: {getStars(PerformanceRating)}");
if (StoryRating > 0)
items.Add($"Story: {getStars(StoryRating)}");
return string.Join("\r\n", items);
}
public int CompareTo(object obj) => obj is Rating second ? CompareTo(second) : -1;
}
}
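For reference, the star formatting introduced above renders like this (a worked example, not output captured from the app; the constructor is internal in v3.0, so this would run from inside the DataLayer assembly):
using System;
var rating = new Rating(4.5f, 4f, 5f);
Console.WriteLine(rating.ToStarString());
// Overall: ★★★★½
// Perform: ★★★★
// Story: ★★★★★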

View File

@ -0,0 +1,4 @@
namespace DataLayer
{
public enum Role { Author = 1, Narrator = 2, Publisher = 3 }
}

Some files were not shown because too many files have changed in this diff.