Merge branch 'Preview4_0' of https://github.com/TorqueGameEngines/Torque3D into fix/terrainpainting

# Conflicts:
#	Engine/source/gui/worldEditor/terrainEditor.cpp
This commit is contained in:
AzaezelX 2021-01-27 23:38:14 -06:00
commit d4cd1edad7
374 changed files with 76201 additions and 53157 deletions

View file

@ -1,5 +1,9 @@
build*/
winbuild/
win64build/
openal-soft.kdev4
.kdev4/
winbuild
win64build
## kdevelop
*.kdev4
## qt-creator
CMakeLists.txt.user*

View file

@ -1,13 +1,19 @@
language: c
language: cpp
matrix:
include:
- os: linux
dist: trusty
dist: xenial
- os: linux
dist: trusty
env:
- BUILD_ANDROID=true
- os: freebsd
compiler: clang
- os: osx
- os: osx
osx_image: xcode11
env:
- BUILD_IOS=true
sudo: required
install:
- >
@ -24,19 +30,44 @@ install:
fi
- >
if [[ "${TRAVIS_OS_NAME}" == "linux" && "${BUILD_ANDROID}" == "true" ]]; then
curl -o ~/android-ndk.zip https://dl.google.com/android/repository/android-ndk-r15-linux-x86_64.zip
curl -o ~/android-ndk.zip https://dl.google.com/android/repository/android-ndk-r21-linux-x86_64.zip
unzip -q ~/android-ndk.zip -d ~ \
'android-ndk-r15/build/cmake/*' \
'android-ndk-r15/build/core/toolchains/arm-linux-androideabi-*/*' \
'android-ndk-r15/platforms/android-14/arch-arm/*' \
'android-ndk-r15/source.properties' \
'android-ndk-r15/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a/*' \
'android-ndk-r15/sources/cxx-stl/gnu-libstdc++/4.9/include/*' \
'android-ndk-r15/sysroot/*' \
'android-ndk-r15/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/*' \
'android-ndk-r15/toolchains/llvm/prebuilt/linux-x86_64/*'
'android-ndk-r21/build/cmake/*' \
'android-ndk-r21/build/core/toolchains/arm-linux-androideabi-*/*' \
'android-ndk-r21/platforms/android-16/arch-arm/*' \
'android-ndk-r21/source.properties' \
'android-ndk-r21/sources/android/support/include/*' \
'android-ndk-r21/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/*' \
'android-ndk-r21/sources/cxx-stl/llvm-libc++/include/*' \
'android-ndk-r21/sysroot/*' \
'android-ndk-r21/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/*' \
'android-ndk-r21/toolchains/llvm/prebuilt/linux-x86_64/*'
export OBOE_LOC=~/oboe
git clone --depth 1 -b 1.3-stable https://github.com/google/oboe "$OBOE_LOC"
fi
- >
if [[ "${TRAVIS_OS_NAME}" == "freebsd" ]]; then
# Install Ninja as it's used downstream.
# Install dependencies for all supported backends.
# Install Qt5 dependency for alsoft-config.
# Install ffmpeg for examples.
sudo pkg install -y \
alsa-lib \
ffmpeg \
jackit \
libmysofa \
ninja \
portaudio \
pulseaudio \
qt5-buildtools \
qt5-qmake \
qt5-widgets \
sdl2 \
sndio \
$NULL
fi
script:
- cmake --version
- >
if [[ "${TRAVIS_OS_NAME}" == "linux" && -z "${BUILD_ANDROID}" ]]; then
cmake \
@ -51,16 +82,43 @@ script:
- >
if [[ "${TRAVIS_OS_NAME}" == "linux" && "${BUILD_ANDROID}" == "true" ]]; then
cmake \
-DCMAKE_TOOLCHAIN_FILE=~/android-ndk-r15/build/cmake/android.toolchain.cmake \
-DANDROID_STL=c++_shared \
-DCMAKE_TOOLCHAIN_FILE=~/android-ndk-r21/build/cmake/android.toolchain.cmake \
-DOBOE_SOURCE="$OBOE_LOC" \
-DALSOFT_REQUIRE_OBOE=ON \
-DALSOFT_REQUIRE_OPENSL=ON \
-DALSOFT_EMBED_HRTF_DATA=YES \
.
fi
- >
if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then
if [[ "${TRAVIS_OS_NAME}" == "freebsd" ]]; then
cmake -GNinja \
-DALSOFT_REQUIRE_ALSA=ON \
-DALSOFT_REQUIRE_JACK=ON \
-DALSOFT_REQUIRE_OSS=ON \
-DALSOFT_REQUIRE_PORTAUDIO=ON \
-DALSOFT_REQUIRE_PULSEAUDIO=ON \
-DALSOFT_REQUIRE_SDL2=ON \
-DALSOFT_REQUIRE_SNDIO=ON \
-DALSOFT_EMBED_HRTF_DATA=YES \
.
fi
- >
if [[ "${TRAVIS_OS_NAME}" == "osx" && -z "${BUILD_IOS}" ]]; then
cmake \
-DALSOFT_REQUIRE_COREAUDIO=ON \
-DALSOFT_EMBED_HRTF_DATA=YES \
.
fi
- make -j2
- >
if [[ "${TRAVIS_OS_NAME}" == "osx" && "${BUILD_IOS}" == "true" ]]; then
cmake \
-GXcode \
-DCMAKE_SYSTEM_NAME=iOS \
-DALSOFT_OSX_FRAMEWORK=ON \
-DALSOFT_REQUIRE_COREAUDIO=ON \
-DALSOFT_EMBED_HRTF_DATA=YES \
"-DCMAKE_OSX_ARCHITECTURES=armv7;arm64" \
.
fi
- cmake --build . --clean-first

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,371 @@
#ifndef ALC_MAIN_H
#define ALC_MAIN_H
#include <algorithm>
#include <array>
#include <atomic>
#include <bitset>
#include <chrono>
#include <cstdint>
#include <cstddef>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include <utility>
#include "AL/al.h"
#include "AL/alc.h"
#include "AL/alext.h"
#include "almalloc.h"
#include "alnumeric.h"
#include "alspan.h"
#include "atomic.h"
#include "core/ambidefs.h"
#include "core/bufferline.h"
#include "core/devformat.h"
#include "core/filters/splitter.h"
#include "core/mixer/defs.h"
#include "hrtf.h"
#include "inprogext.h"
#include "intrusive_ptr.h"
#include "vector.h"
class BFormatDec;
struct ALbuffer;
struct ALeffect;
struct ALfilter;
struct BackendBase;
struct Compressor;
struct EffectState;
struct Uhj2Encoder;
struct bs2b;
using uint = unsigned int;
#define MIN_OUTPUT_RATE 8000
#define MAX_OUTPUT_RATE 192000
#define DEFAULT_OUTPUT_RATE 44100
#define DEFAULT_UPDATE_SIZE 882 /* 20ms */
#define DEFAULT_NUM_UPDATES 3
/* Basic categorization of a device: regular playback, capture, or a loopback
 * device that renders on demand instead of to real hardware. */
enum class DeviceType : unsigned char {
    Playback,
    Capture,
    Loopback
};
/* How output is spatialized for the device's channel configuration. */
enum class RenderMode : unsigned char {
    Normal,
    Pairwise, /* NOTE(review): presumably pair-wise speaker panning -- confirm */
    Hrtf
};
/* Maps a source channel to up to two (channel, gain) target mixes.
 * NOTE(review): presumably used to fold a channel that has no direct output
 * slot into nearby channels -- confirm against users of RealMixParams::RemixMap.
 */
struct InputRemixMap {
    struct TargetMix { Channel channel; float mix; };

    Channel channel;
    std::array<TargetMix,2> targets;
};
/* A 64-slot chunk of ALbuffer storage. A set bit in FreeMask marks a free
 * slot. Copying is forbidden; moving transfers ownership of the slot array.
 */
struct BufferSubList {
    uint64_t FreeMask{~0_u64};
    ALbuffer *Buffers{nullptr}; /* 64 */

    BufferSubList() noexcept = default;
    BufferSubList(const BufferSubList&) = delete;
    BufferSubList(BufferSubList&& rhs) noexcept
        : FreeMask{std::exchange(rhs.FreeMask, ~0_u64)}
        , Buffers{std::exchange(rhs.Buffers, nullptr)}
    { }
    ~BufferSubList();

    BufferSubList& operator=(const BufferSubList&) = delete;
    BufferSubList& operator=(BufferSubList&& rhs) noexcept
    {
        std::swap(FreeMask, rhs.FreeMask);
        std::swap(Buffers, rhs.Buffers);
        return *this;
    }
};
/* A 64-slot chunk of ALeffect storage. A set bit in FreeMask marks a free
 * slot. Copying is forbidden; moving transfers ownership of the slot array.
 */
struct EffectSubList {
    uint64_t FreeMask{~0_u64};
    ALeffect *Effects{nullptr}; /* 64 */

    EffectSubList() noexcept = default;
    EffectSubList(const EffectSubList&) = delete;
    EffectSubList(EffectSubList&& rhs) noexcept
        : FreeMask{std::exchange(rhs.FreeMask, ~0_u64)}
        , Effects{std::exchange(rhs.Effects, nullptr)}
    { }
    ~EffectSubList();

    EffectSubList& operator=(const EffectSubList&) = delete;
    EffectSubList& operator=(EffectSubList&& rhs) noexcept
    {
        std::swap(FreeMask, rhs.FreeMask);
        std::swap(Effects, rhs.Effects);
        return *this;
    }
};
/* A 64-slot chunk of ALfilter storage. A set bit in FreeMask marks a free
 * slot. Copying is forbidden; moving transfers ownership of the slot array.
 */
struct FilterSubList {
    uint64_t FreeMask{~0_u64};
    ALfilter *Filters{nullptr}; /* 64 */

    FilterSubList() noexcept = default;
    FilterSubList(const FilterSubList&) = delete;
    FilterSubList(FilterSubList&& rhs) noexcept
        : FreeMask{std::exchange(rhs.FreeMask, ~0_u64)}
        , Filters{std::exchange(rhs.Filters, nullptr)}
    { }
    ~FilterSubList();

    FilterSubList& operator=(const FilterSubList&) = delete;
    FilterSubList& operator=(FilterSubList&& rhs) noexcept
    {
        std::swap(FreeMask, rhs.FreeMask);
        std::swap(Filters, rhs.Filters);
        return *this;
    }
};
/* Maximum delay in samples for speaker distance compensation. */
#define MAX_DELAY_LENGTH 1024
/* Per-channel delay lines used to compensate for differing speaker
 * distances. The sample storage is a flexible array member allocated in the
 * same block as the object (via FamCount/DEF_FAM_NEWDEL).
 */
struct DistanceComp {
    struct ChanData {
        float Gain{1.0f};
        uint Length{0u}; /* Valid range is [0...MAX_DELAY_LENGTH). */
        /* NOTE(review): presumably points into mSamples below -- confirm. */
        float *Buffer{nullptr};
    };

    std::array<ChanData,MAX_OUTPUT_CHANNELS> mChannels;
    al::FlexArray<float,16> mSamples;

    DistanceComp(size_t count) : mSamples{count} { }

    /* Allocates the object plus `numsamples` floats of trailing storage. */
    static std::unique_ptr<DistanceComp> Create(size_t numsamples)
    { return std::unique_ptr<DistanceComp>{new(FamCount(numsamples)) DistanceComp{numsamples}}; }

    DEF_FAM_NEWDEL(DistanceComp, mSamples)
};
/* A scale factor and ambisonic channel index pair, used as one entry of a
 * MixParams::AmbiMap coefficient mapping. */
struct BFChannelConfig {
    float Scale;
    uint Index;
};
/* Parameters for an ambisonic mixing target. */
struct MixParams {
    /* Coefficient channel mapping for mixing to the buffer. */
    std::array<BFChannelConfig,MAX_OUTPUT_CHANNELS> AmbiMap{};

    /* Non-owning view of the buffer lines mixed into. */
    al::span<FloatBufferLine> Buffer;
};
/* Parameters for the "real" (device channel order) output target. */
struct RealMixParams {
    /* Fold-down rules; NOTE(review): presumably applied to inputs with no
     * direct output slot -- confirm. */
    al::span<const InputRemixMap> RemixMap;
    /* Buffer index per named channel; read by GetChannelIdxByName, which
     * documents ~0u (INVALID_CHANNEL_INDEX) as "not present". */
    std::array<uint,MaxChannels> ChannelIndex{};
    /* Non-owning view of the buffer lines written to the device. */
    al::span<FloatBufferLine> Buffer;
};
/* Bit positions for ALCdevice::Flags (a std::bitset, so these are indices,
 * not mask values). */
enum {
    // Frequency was requested by the app or config file
    FrequencyRequest,
    // Channel configuration was requested by the config file
    ChannelsRequest,
    // Sample type was requested by the config file
    SampleTypeRequest,

    // Specifies if the DSP is paused at user request
    DevicePaused,
    // Specifies if the device is currently running
    DeviceRunning,

    DeviceFlagsCount
};
/* The core device object: format/state, mixing scratch buffers, the dry and
 * real output targets, post-processing stages, and the per-device object
 * (buffer/effect/filter) maps. Lifetime is managed by intrusive refcounting.
 */
struct ALCdevice : public al::intrusive_ref<ALCdevice> {
    std::atomic<bool> Connected{true};
    const DeviceType Type{};

    uint Frequency{};
    uint UpdateSize{};
    uint BufferSize{};

    DevFmtChannels FmtChans{};
    DevFmtType FmtType{};
    bool IsHeadphones{false};
    uint mAmbiOrder{0};
    float mXOverFreq{400.0f};
    /* For DevFmtAmbi* output only, specifies the channel order and
     * normalization.
     */
    DevAmbiLayout mAmbiLayout{DevAmbiLayout::Default};
    DevAmbiScaling mAmbiScale{DevAmbiScaling::Default};

    std::string DeviceName;

    // Device flags
    std::bitset<DeviceFlagsCount> Flags{};

    // Maximum number of sources that can be created
    uint SourcesMax{};
    // Maximum number of slots that can be created
    uint AuxiliaryEffectSlotMax{};

    /* Rendering mode. */
    RenderMode mRenderMode{RenderMode::Normal};

    /* The average speaker distance as determined by the ambdec configuration,
     * HRTF data set, or the NFC-HOA reference delay. Only used for NFC.
     */
    float AvgSpeakerDist{0.0f};

    uint SamplesDone{0u};
    std::chrono::nanoseconds ClockBase{0};
    std::chrono::nanoseconds FixedLatency{0};

    /* Temp storage used for mixer processing. */
    alignas(16) float SourceData[BufferLineSize + MaxResamplerPadding];
    alignas(16) float ResampledData[BufferLineSize];
    alignas(16) float FilteredData[BufferLineSize];
    /* HRTF and NFC processing are mutually exclusive, so share storage. */
    union {
        alignas(16) float HrtfSourceData[BufferLineSize + HrtfHistoryLength];
        alignas(16) float NfcSampleData[BufferLineSize];
    };

    /* Persistent storage for HRTF mixing. */
    alignas(16) float2 HrtfAccumData[BufferLineSize + HrirLength + HrtfDirectDelay];

    /* Mixing buffer used by the Dry mix and Real output. */
    al::vector<FloatBufferLine, 16> MixBuffer;

    /* The "dry" path corresponds to the main output. */
    MixParams Dry;
    uint NumChannelsPerOrder[MaxAmbiOrder+1]{};

    /* "Real" output, which will be written to the device buffer. May alias the
     * dry buffer.
     */
    RealMixParams RealOut;

    /* HRTF state and info */
    std::unique_ptr<DirectHrtfState> mHrtfState;
    al::intrusive_ptr<HrtfStore> mHrtf;
    uint mIrSize{0};

    /* Ambisonic-to-UHJ encoder */
    std::unique_ptr<Uhj2Encoder> Uhj_Encoder;

    /* Ambisonic decoder for speakers */
    std::unique_ptr<BFormatDec> AmbiDecoder;

    /* Stereo-to-binaural filter */
    std::unique_ptr<bs2b> Bs2b;

    /* Selected post-processing stage (one of the Process* members below). */
    using PostProc = void(ALCdevice::*)(const size_t SamplesToDo);
    PostProc PostProcess{nullptr};

    std::unique_ptr<Compressor> Limiter;

    /* Delay buffers used to compensate for speaker distances. */
    std::unique_ptr<DistanceComp> ChannelDelays;

    /* Dithering control. */
    float DitherDepth{0.0f};
    uint DitherSeed{0u};

    /* Running count of the mixer invocations, in 31.1 fixed point. This
     * actually increments *twice* when mixing, first at the start and then at
     * the end, so the bottom bit indicates if the device is currently mixing
     * and the upper bits indicates how many mixes have been done.
     */
    RefCount MixCount{0u};

    // Contexts created on this device
    std::atomic<al::FlexArray<ALCcontext*>*> mContexts{nullptr};

    /* This lock protects the device state (format, update size, etc) from
     * being from being changed in multiple threads, or being accessed while
     * being changed. It's also used to serialize calls to the backend.
     */
    std::mutex StateLock;
    std::unique_ptr<BackendBase> Backend;

    ALCuint NumMonoSources{};
    ALCuint NumStereoSources{};
    ALCuint NumAuxSends{};

    std::string HrtfName;
    al::vector<std::string> HrtfList;
    ALCenum HrtfStatus{ALC_FALSE};

    ALCenum LimiterState{ALC_DONT_CARE_SOFT};

    std::atomic<ALCenum> LastError{ALC_NO_ERROR};

    // Map of Buffers for this device
    std::mutex BufferLock;
    al::vector<BufferSubList> BufferList;

    // Map of Effects for this device
    std::mutex EffectLock;
    al::vector<EffectSubList> EffectList;

    // Map of Filters for this device
    std::mutex FilterLock;
    al::vector<FilterSubList> FilterList;

    ALCdevice(DeviceType type);
    ALCdevice(const ALCdevice&) = delete;
    ALCdevice& operator=(const ALCdevice&) = delete;
    ~ALCdevice();

    uint bytesFromFmt() const noexcept { return BytesFromDevFmt(FmtType); }
    uint channelsFromFmt() const noexcept { return ChannelsFromDevFmt(FmtChans, mAmbiOrder); }
    uint frameSizeFromFmt() const noexcept { return bytesFromFmt() * channelsFromFmt(); }

    /* Spin until MixCount's low bit is clear (the mixer is between runs) and
     * return the observed count. NOTE(review): pure busy-wait, no pause/yield.
     */
    uint waitForMix() const noexcept
    {
        uint refcount;
        while((refcount=MixCount.load(std::memory_order_acquire))&1) {
        }
        return refcount;
    }

    void ProcessHrtf(const size_t SamplesToDo);
    void ProcessAmbiDec(const size_t SamplesToDo);
    void ProcessAmbiDecStablized(const size_t SamplesToDo);
    void ProcessUhj(const size_t SamplesToDo);
    void ProcessBs2b(const size_t SamplesToDo);

    /* Invoke the configured post-process stage, if any. */
    inline void postProcess(const size_t SamplesToDo)
    { if LIKELY(PostProcess) (this->*PostProcess)(SamplesToDo); }

    void renderSamples(void *outBuffer, const uint numSamples, const size_t frameStep);

    /* Caller must lock the device state, and the mixer must not be running. */
    [[gnu::format(printf,2,3)]] void handleDisconnect(const char *msg, ...);

    DEF_NEWDEL(ALCdevice)
};
/* Must be less than 15 characters (16 including terminating null) for
* compatibility with pthread_setname_np limitations. */
#define MIXER_THREAD_NAME "alsoft-mixer"
#define RECORD_THREAD_NAME "alsoft-record"
extern int RTPrioLevel;
void SetRTPriority(void);
/**
 * Returns the index for the given channel name (e.g. FrontCenter), or
 * INVALID_CHANNEL_INDEX if it doesn't exist.
 *
 * NOTE(review): no explicit miss check here -- relies on ChannelIndex storing
 * ~0u for absent channels; confirm where RealMixParams is populated.
 */
inline uint GetChannelIdxByName(const RealMixParams &real, Channel chan) noexcept
{ return real.ChannelIndex[chan]; }
#define INVALID_CHANNEL_INDEX ~0u
al::vector<std::string> SearchDataFiles(const char *match, const char *subdir);
#endif

View file

@ -1,675 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#ifdef _WIN32
#ifdef __MINGW32__
#define _WIN32_IE 0x501
#else
#define _WIN32_IE 0x400
#endif
#endif
#include "config.h"
#include <stdlib.h>
#include <stdio.h>
#include <ctype.h>
#include <string.h>
#ifdef _WIN32_IE
#include <windows.h>
#include <shlobj.h>
#endif
#include "alMain.h"
#include "alconfig.h"
#include "compat.h"
#include "bool.h"
typedef struct ConfigEntry {
char *key;
char *value;
} ConfigEntry;
typedef struct ConfigBlock {
ConfigEntry *entries;
unsigned int entryCount;
} ConfigBlock;
static ConfigBlock cfgBlock;
static char *lstrip(char *line)
{
while(isspace(line[0]))
line++;
return line;
}
static char *rstrip(char *line)
{
size_t len = strlen(line);
while(len > 0 && isspace(line[len-1]))
len--;
line[len] = 0;
return line;
}
static int readline(FILE *f, char **output, size_t *maxlen)
{
size_t len = 0;
int c;
while((c=fgetc(f)) != EOF && (c == '\r' || c == '\n'))
;
if(c == EOF)
return 0;
do {
if(len+1 >= *maxlen)
{
void *temp = NULL;
size_t newmax;
newmax = (*maxlen ? (*maxlen)<<1 : 32);
if(newmax > *maxlen)
temp = realloc(*output, newmax);
if(!temp)
{
ERR("Failed to realloc "SZFMT" bytes from "SZFMT"!\n", newmax, *maxlen);
return 0;
}
*output = temp;
*maxlen = newmax;
}
(*output)[len++] = c;
(*output)[len] = '\0';
} while((c=fgetc(f)) != EOF && c != '\r' && c != '\n');
return 1;
}
static char *expdup(const char *str)
{
char *output = NULL;
size_t maxlen = 0;
size_t len = 0;
while(*str != '\0')
{
const char *addstr;
size_t addstrlen;
size_t i;
if(str[0] != '$')
{
const char *next = strchr(str, '$');
addstr = str;
addstrlen = next ? (size_t)(next-str) : strlen(str);
str += addstrlen;
}
else
{
str++;
if(*str == '$')
{
const char *next = strchr(str+1, '$');
addstr = str;
addstrlen = next ? (size_t)(next-str) : strlen(str);
str += addstrlen;
}
else
{
bool hasbraces;
char envname[1024];
size_t k = 0;
hasbraces = (*str == '{');
if(hasbraces) str++;
while((isalnum(*str) || *str == '_') && k < sizeof(envname)-1)
envname[k++] = *(str++);
envname[k++] = '\0';
if(hasbraces && *str != '}')
continue;
if(hasbraces) str++;
if((addstr=getenv(envname)) == NULL)
continue;
addstrlen = strlen(addstr);
}
}
if(addstrlen == 0)
continue;
if(addstrlen >= maxlen-len)
{
void *temp = NULL;
size_t newmax;
newmax = len+addstrlen+1;
if(newmax > maxlen)
temp = realloc(output, newmax);
if(!temp)
{
ERR("Failed to realloc "SZFMT" bytes from "SZFMT"!\n", newmax, maxlen);
return output;
}
output = temp;
maxlen = newmax;
}
for(i = 0;i < addstrlen;i++)
output[len++] = addstr[i];
output[len] = '\0';
}
return output ? output : calloc(1, 1);
}
static void LoadConfigFromFile(FILE *f)
{
char curSection[128] = "";
char *buffer = NULL;
size_t maxlen = 0;
ConfigEntry *ent;
while(readline(f, &buffer, &maxlen))
{
char *line, *comment;
char key[256] = "";
char value[256] = "";
line = rstrip(lstrip(buffer));
if(!line[0]) continue;
if(line[0] == '[')
{
char *section = line+1;
char *endsection;
endsection = strchr(section, ']');
if(!endsection || section == endsection)
{
ERR("config parse error: bad line \"%s\"\n", line);
continue;
}
if(endsection[1] != 0)
{
char *end = endsection+1;
while(isspace(*end))
++end;
if(*end != 0 && *end != '#')
{
ERR("config parse error: bad line \"%s\"\n", line);
continue;
}
}
*endsection = 0;
if(strcasecmp(section, "general") == 0)
curSection[0] = 0;
else
{
size_t len, p = 0;
do {
char *nextp = strchr(section, '%');
if(!nextp)
{
strncpy(curSection+p, section, sizeof(curSection)-1-p);
break;
}
len = nextp - section;
if(len > sizeof(curSection)-1-p)
len = sizeof(curSection)-1-p;
strncpy(curSection+p, section, len);
p += len;
section = nextp;
if(((section[1] >= '0' && section[1] <= '9') ||
(section[1] >= 'a' && section[1] <= 'f') ||
(section[1] >= 'A' && section[1] <= 'F')) &&
((section[2] >= '0' && section[2] <= '9') ||
(section[2] >= 'a' && section[2] <= 'f') ||
(section[2] >= 'A' && section[2] <= 'F')))
{
unsigned char b = 0;
if(section[1] >= '0' && section[1] <= '9')
b = (section[1]-'0') << 4;
else if(section[1] >= 'a' && section[1] <= 'f')
b = (section[1]-'a'+0xa) << 4;
else if(section[1] >= 'A' && section[1] <= 'F')
b = (section[1]-'A'+0x0a) << 4;
if(section[2] >= '0' && section[2] <= '9')
b |= (section[2]-'0');
else if(section[2] >= 'a' && section[2] <= 'f')
b |= (section[2]-'a'+0xa);
else if(section[2] >= 'A' && section[2] <= 'F')
b |= (section[2]-'A'+0x0a);
if(p < sizeof(curSection)-1)
curSection[p++] = b;
section += 3;
}
else if(section[1] == '%')
{
if(p < sizeof(curSection)-1)
curSection[p++] = '%';
section += 2;
}
else
{
if(p < sizeof(curSection)-1)
curSection[p++] = '%';
section += 1;
}
if(p < sizeof(curSection)-1)
curSection[p] = 0;
} while(p < sizeof(curSection)-1 && *section != 0);
curSection[sizeof(curSection)-1] = 0;
}
continue;
}
comment = strchr(line, '#');
if(comment) *(comment++) = 0;
if(!line[0]) continue;
if(sscanf(line, "%255[^=] = \"%255[^\"]\"", key, value) == 2 ||
sscanf(line, "%255[^=] = '%255[^\']'", key, value) == 2 ||
sscanf(line, "%255[^=] = %255[^\n]", key, value) == 2)
{
/* sscanf doesn't handle '' or "" as empty values, so clip it
* manually. */
if(strcmp(value, "\"\"") == 0 || strcmp(value, "''") == 0)
value[0] = 0;
}
else if(sscanf(line, "%255[^=] %255[=]", key, value) == 2)
{
/* Special case for 'key =' */
value[0] = 0;
}
else
{
ERR("config parse error: malformed option line: \"%s\"\n\n", line);
continue;
}
rstrip(key);
if(curSection[0] != 0)
{
size_t len = strlen(curSection);
memmove(&key[len+1], key, sizeof(key)-1-len);
key[len] = '/';
memcpy(key, curSection, len);
}
/* Check if we already have this option set */
ent = cfgBlock.entries;
while((unsigned int)(ent-cfgBlock.entries) < cfgBlock.entryCount)
{
if(strcasecmp(ent->key, key) == 0)
break;
ent++;
}
if((unsigned int)(ent-cfgBlock.entries) >= cfgBlock.entryCount)
{
/* Allocate a new option entry */
ent = realloc(cfgBlock.entries, (cfgBlock.entryCount+1)*sizeof(ConfigEntry));
if(!ent)
{
ERR("config parse error: error reallocating config entries\n");
continue;
}
cfgBlock.entries = ent;
ent = cfgBlock.entries + cfgBlock.entryCount;
cfgBlock.entryCount++;
ent->key = strdup(key);
ent->value = NULL;
}
free(ent->value);
ent->value = expdup(value);
TRACE("found '%s' = '%s'\n", ent->key, ent->value);
}
free(buffer);
}
#ifdef _WIN32
void ReadALConfig(void)
{
al_string ppath = AL_STRING_INIT_STATIC();
WCHAR buffer[MAX_PATH];
const WCHAR *str;
FILE *f;
if(SHGetSpecialFolderPathW(NULL, buffer, CSIDL_APPDATA, FALSE) != FALSE)
{
al_string filepath = AL_STRING_INIT_STATIC();
alstr_copy_wcstr(&filepath, buffer);
alstr_append_cstr(&filepath, "\\alsoft.ini");
TRACE("Loading config %s...\n", alstr_get_cstr(filepath));
f = al_fopen(alstr_get_cstr(filepath), "rt");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
alstr_reset(&filepath);
}
GetProcBinary(&ppath, NULL);
if(!alstr_empty(ppath))
{
alstr_append_cstr(&ppath, "\\alsoft.ini");
TRACE("Loading config %s...\n", alstr_get_cstr(ppath));
f = al_fopen(alstr_get_cstr(ppath), "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
}
if((str=_wgetenv(L"ALSOFT_CONF")) != NULL && *str)
{
al_string filepath = AL_STRING_INIT_STATIC();
alstr_copy_wcstr(&filepath, str);
TRACE("Loading config %s...\n", alstr_get_cstr(filepath));
f = al_fopen(alstr_get_cstr(filepath), "rt");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
alstr_reset(&filepath);
}
alstr_reset(&ppath);
}
#else
void ReadALConfig(void)
{
al_string confpaths = AL_STRING_INIT_STATIC();
al_string fname = AL_STRING_INIT_STATIC();
const char *str;
FILE *f;
str = "/etc/openal/alsoft.conf";
TRACE("Loading config %s...\n", str);
f = al_fopen(str, "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
if(!(str=getenv("XDG_CONFIG_DIRS")) || str[0] == 0)
str = "/etc/xdg";
alstr_copy_cstr(&confpaths, str);
/* Go through the list in reverse, since "the order of base directories
* denotes their importance; the first directory listed is the most
* important". Ergo, we need to load the settings from the later dirs
* first so that the settings in the earlier dirs override them.
*/
while(!alstr_empty(confpaths))
{
char *next = strrchr(alstr_get_cstr(confpaths), ':');
if(next)
{
size_t len = next - alstr_get_cstr(confpaths);
alstr_copy_cstr(&fname, next+1);
VECTOR_RESIZE(confpaths, len, len+1);
VECTOR_ELEM(confpaths, len) = 0;
}
else
{
alstr_reset(&fname);
fname = confpaths;
AL_STRING_INIT(confpaths);
}
if(alstr_empty(fname) || VECTOR_FRONT(fname) != '/')
WARN("Ignoring XDG config dir: %s\n", alstr_get_cstr(fname));
else
{
if(VECTOR_BACK(fname) != '/') alstr_append_cstr(&fname, "/alsoft.conf");
else alstr_append_cstr(&fname, "alsoft.conf");
TRACE("Loading config %s...\n", alstr_get_cstr(fname));
f = al_fopen(alstr_get_cstr(fname), "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
}
alstr_clear(&fname);
}
if((str=getenv("HOME")) != NULL && *str)
{
alstr_copy_cstr(&fname, str);
if(VECTOR_BACK(fname) != '/') alstr_append_cstr(&fname, "/.alsoftrc");
else alstr_append_cstr(&fname, ".alsoftrc");
TRACE("Loading config %s...\n", alstr_get_cstr(fname));
f = al_fopen(alstr_get_cstr(fname), "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
}
if((str=getenv("XDG_CONFIG_HOME")) != NULL && str[0] != 0)
{
alstr_copy_cstr(&fname, str);
if(VECTOR_BACK(fname) != '/') alstr_append_cstr(&fname, "/alsoft.conf");
else alstr_append_cstr(&fname, "alsoft.conf");
}
else
{
alstr_clear(&fname);
if((str=getenv("HOME")) != NULL && str[0] != 0)
{
alstr_copy_cstr(&fname, str);
if(VECTOR_BACK(fname) != '/') alstr_append_cstr(&fname, "/.config/alsoft.conf");
else alstr_append_cstr(&fname, ".config/alsoft.conf");
}
}
if(!alstr_empty(fname))
{
TRACE("Loading config %s...\n", alstr_get_cstr(fname));
f = al_fopen(alstr_get_cstr(fname), "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
}
alstr_clear(&fname);
GetProcBinary(&fname, NULL);
if(!alstr_empty(fname))
{
if(VECTOR_BACK(fname) != '/') alstr_append_cstr(&fname, "/alsoft.conf");
else alstr_append_cstr(&fname, "alsoft.conf");
TRACE("Loading config %s...\n", alstr_get_cstr(fname));
f = al_fopen(alstr_get_cstr(fname), "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
}
if((str=getenv("ALSOFT_CONF")) != NULL && *str)
{
TRACE("Loading config %s...\n", str);
f = al_fopen(str, "r");
if(f)
{
LoadConfigFromFile(f);
fclose(f);
}
}
alstr_reset(&fname);
alstr_reset(&confpaths);
}
#endif
void FreeALConfig(void)
{
unsigned int i;
for(i = 0;i < cfgBlock.entryCount;i++)
{
free(cfgBlock.entries[i].key);
free(cfgBlock.entries[i].value);
}
free(cfgBlock.entries);
}
const char *GetConfigValue(const char *devName, const char *blockName, const char *keyName, const char *def)
{
unsigned int i;
char key[256];
if(!keyName)
return def;
if(blockName && strcasecmp(blockName, "general") != 0)
{
if(devName)
snprintf(key, sizeof(key), "%s/%s/%s", blockName, devName, keyName);
else
snprintf(key, sizeof(key), "%s/%s", blockName, keyName);
}
else
{
if(devName)
snprintf(key, sizeof(key), "%s/%s", devName, keyName);
else
{
strncpy(key, keyName, sizeof(key)-1);
key[sizeof(key)-1] = 0;
}
}
for(i = 0;i < cfgBlock.entryCount;i++)
{
if(strcmp(cfgBlock.entries[i].key, key) == 0)
{
TRACE("Found %s = \"%s\"\n", key, cfgBlock.entries[i].value);
if(cfgBlock.entries[i].value[0])
return cfgBlock.entries[i].value;
return def;
}
}
if(!devName)
{
TRACE("Key %s not found\n", key);
return def;
}
return GetConfigValue(NULL, blockName, keyName, def);
}
int ConfigValueExists(const char *devName, const char *blockName, const char *keyName)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
return !!val[0];
}
int ConfigValueStr(const char *devName, const char *blockName, const char *keyName, const char **ret)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
if(!val[0]) return 0;
*ret = val;
return 1;
}
int ConfigValueInt(const char *devName, const char *blockName, const char *keyName, int *ret)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
if(!val[0]) return 0;
*ret = strtol(val, NULL, 0);
return 1;
}
int ConfigValueUInt(const char *devName, const char *blockName, const char *keyName, unsigned int *ret)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
if(!val[0]) return 0;
*ret = strtoul(val, NULL, 0);
return 1;
}
int ConfigValueFloat(const char *devName, const char *blockName, const char *keyName, float *ret)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
if(!val[0]) return 0;
#ifdef HAVE_STRTOF
*ret = strtof(val, NULL);
#else
*ret = (float)strtod(val, NULL);
#endif
return 1;
}
int ConfigValueBool(const char *devName, const char *blockName, const char *keyName, int *ret)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
if(!val[0]) return 0;
*ret = (strcasecmp(val, "true") == 0 || strcasecmp(val, "yes") == 0 ||
strcasecmp(val, "on") == 0 || atoi(val) != 0);
return 1;
}
int GetConfigValueBool(const char *devName, const char *blockName, const char *keyName, int def)
{
const char *val = GetConfigValue(devName, blockName, keyName, "");
if(!val[0]) return !!def;
return (strcasecmp(val, "true") == 0 || strcasecmp(val, "yes") == 0 ||
strcasecmp(val, "on") == 0 || atoi(val) != 0);
}

View file

@ -0,0 +1,542 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "alconfig.h"
#include <cstdlib>
#include <cctype>
#include <cstring>
#ifdef _WIN32
#include <windows.h>
#include <shlobj.h>
#endif
#ifdef __APPLE__
#include <CoreFoundation/CoreFoundation.h>
#endif
#include <algorithm>
#include <cstdio>
#include <string>
#include <utility>
#include "alfstream.h"
#include "alstring.h"
#include "compat.h"
#include "core/logging.h"
#include "strutils.h"
#include "vector.h"
namespace {
/* One loaded option: key is "section/name" (or just "name" for the global
 * [general] section), value is the expanded text. */
struct ConfigEntry {
    std::string key;
    std::string value;
};
al::vector<ConfigEntry> ConfOpts;
/* Removes leading whitespace from `line` in place and returns it.
 *
 * Fix: std::isspace must not be given a plain char -- a negative value (any
 * non-ASCII byte on signed-char platforms) is undefined behavior. Cast to
 * unsigned char first.
 */
std::string &lstrip(std::string &line)
{
    size_t pos{0};
    while(pos < line.length() && std::isspace(static_cast<unsigned char>(line[pos])))
        ++pos;
    line.erase(0, pos);
    return line;
}
/* Reads the next non-empty line from `f` into `output`. Returns false at end
 * of stream.
 *
 * Fix: the original only skipped '\n' and left the '\r' of CRLF-terminated
 * files attached to the line, corrupting parsed keys/values (the C version
 * this replaced handled both). Skip '\r' too and strip a trailing '\r'.
 */
bool readline(std::istream &f, std::string &output)
{
    /* Skip line terminators so getline starts at real content. */
    while(f.good() && (f.peek() == '\n' || f.peek() == '\r'))
        f.ignore();
    if(!std::getline(f, output))
        return false;
    /* getline stops at '\n'; drop the '\r' a CRLF file leaves behind. */
    if(!output.empty() && output.back() == '\r')
        output.pop_back();
    return !output.empty();
}
/* Returns `str` with $NAME / ${NAME} environment references expanded ("$$"
 * produces a literal '$'; an unset variable or unterminated brace expands to
 * nothing).
 *
 * Fix: std::isalnum must not be given a plain char -- a negative value (any
 * non-ASCII byte on signed-char platforms) is undefined behavior. Cast to
 * unsigned char first.
 */
std::string expdup(const char *str)
{
    std::string output;

    std::string envval;
    while(*str != '\0')
    {
        const char *addstr;
        size_t addstrlen;

        if(str[0] != '$')
        {
            /* Copy the run of plain text up to the next '$' (or the end). */
            const char *next = std::strchr(str, '$');
            addstr = str;
            addstrlen = next ? static_cast<size_t>(next-str) : std::strlen(str);

            str += addstrlen;
        }
        else
        {
            str++;
            if(*str == '$')
            {
                /* "$$": keep the second '$' and continue as plain text. */
                const char *next = std::strchr(str+1, '$');
                addstr = str;
                addstrlen = next ? static_cast<size_t>(next-str) : std::strlen(str);

                str += addstrlen;
            }
            else
            {
                const bool hasbraces{(*str == '{')};
                if(hasbraces) str++;

                /* Scan the variable name ([A-Za-z0-9_]+). */
                const char *envstart = str;
                while(std::isalnum(static_cast<unsigned char>(*str)) || *str == '_')
                    ++str;
                if(hasbraces && *str != '}')
                    continue;
                const std::string envname{envstart, str};
                if(hasbraces) str++;

                envval = al::getenv(envname.c_str()).value_or(std::string{});
                addstr = envval.data();
                addstrlen = envval.length();
            }
        }
        if(addstrlen == 0)
            continue;

        output.append(addstr, addstrlen);
    }

    return output;
}
/* Parses an INI-style config stream into the global ConfOpts list.
 * [section] headers prefix subsequent keys as "section/key" ([general] maps
 * to no prefix); %xx escapes in section names decode to raw bytes; '#'
 * starts a comment; values may be quoted with matching '"' or '\''.
 */
void LoadConfigFromFile(std::istream &f)
{
    std::string curSection;
    std::string buffer;

    while(readline(f, buffer))
    {
        if(lstrip(buffer).empty())
            continue;

        if(buffer[0] == '[')
        {
            /* Section header: parse in place via char pointers. */
            char *line{&buffer[0]};
            char *section = line+1;
            char *endsection;

            endsection = std::strchr(section, ']');
            if(!endsection || section == endsection)
            {
                ERR(" config parse error: bad line \"%s\"\n", line);
                continue;
            }
            if(endsection[1] != 0)
            {
                /* Only whitespace or a comment may follow the ']'.
                 * NOTE(review): isspace on plain char is UB for negative
                 * (non-ASCII) values -- should cast to unsigned char. */
                char *end = endsection+1;
                while(std::isspace(*end))
                    ++end;
                if(*end != 0 && *end != '#')
                {
                    ERR(" config parse error: bad line \"%s\"\n", line);
                    continue;
                }
            }
            *endsection = 0;

            curSection.clear();
            if(al::strcasecmp(section, "general") != 0)
            {
                do {
                    char *nextp = std::strchr(section, '%');
                    if(!nextp)
                    {
                        curSection += section;
                        break;
                    }
                    curSection.append(section, nextp);
                    section = nextp;

                    /* Decode a %xx hex escape into one byte. */
                    if(((section[1] >= '0' && section[1] <= '9') ||
                        (section[1] >= 'a' && section[1] <= 'f') ||
                        (section[1] >= 'A' && section[1] <= 'F')) &&
                       ((section[2] >= '0' && section[2] <= '9') ||
                        (section[2] >= 'a' && section[2] <= 'f') ||
                        (section[2] >= 'A' && section[2] <= 'F')))
                    {
                        int b{0};
                        if(section[1] >= '0' && section[1] <= '9')
                            b = (section[1]-'0') << 4;
                        else if(section[1] >= 'a' && section[1] <= 'f')
                            b = (section[1]-'a'+0xa) << 4;
                        else if(section[1] >= 'A' && section[1] <= 'F')
                            b = (section[1]-'A'+0x0a) << 4;
                        if(section[2] >= '0' && section[2] <= '9')
                            b |= (section[2]-'0');
                        else if(section[2] >= 'a' && section[2] <= 'f')
                            b |= (section[2]-'a'+0xa);
                        else if(section[2] >= 'A' && section[2] <= 'F')
                            b |= (section[2]-'A'+0x0a);
                        curSection += static_cast<char>(b);
                        section += 3;
                    }
                    else if(section[1] == '%')
                    {
                        /* "%%" is a literal '%'. */
                        curSection += '%';
                        section += 2;
                    }
                    else
                    {
                        /* Lone '%': kept as-is. */
                        curSection += '%';
                        section += 1;
                    }
                } while(*section != 0);
            }

            continue;
        }

        /* Strip a trailing comment plus the whitespace before it. */
        auto cmtpos = std::min(buffer.find('#'), buffer.size());
        while(cmtpos > 0 && std::isspace(buffer[cmtpos-1]))
            --cmtpos;
        if(!cmtpos) continue;
        buffer.erase(cmtpos);

        /* Split "key = value" on the first '='. */
        auto sep = buffer.find('=');
        if(sep == std::string::npos)
        {
            ERR(" config parse error: malformed option line: \"%s\"\n", buffer.c_str());
            continue;
        }
        auto keyend = sep++;
        while(keyend > 0 && std::isspace(buffer[keyend-1]))
            --keyend;
        if(!keyend)
        {
            ERR(" config parse error: malformed option line: \"%s\"\n", buffer.c_str());
            continue;
        }
        while(sep < buffer.size() && std::isspace(buffer[sep]))
            sep++;

        std::string fullKey;
        if(!curSection.empty())
        {
            fullKey += curSection;
            fullKey += '/';
        }
        fullKey += buffer.substr(0u, keyend);

        std::string value{(sep < buffer.size()) ? buffer.substr(sep) : std::string{}};
        if(value.size() > 1)
        {
            /* Strip matched surrounding quotes (single or double). */
            if((value.front() == '"' && value.back() == '"')
                || (value.front() == '\'' && value.back() == '\''))
            {
                value.pop_back();
                value.erase(value.begin());
            }
        }

        TRACE(" found '%s' = '%s'\n", fullKey.c_str(), value.c_str());

        /* Check if we already have this option set */
        auto find_key = [&fullKey](const ConfigEntry &entry) -> bool
        { return entry.key == fullKey; };
        auto ent = std::find_if(ConfOpts.begin(), ConfOpts.end(), find_key);
        if(ent != ConfOpts.end())
        {
            /* A later empty value removes an earlier setting. */
            if(!value.empty())
                ent->value = expdup(value.c_str());
            else
                ConfOpts.erase(ent);
        }
        else if(!value.empty())
            ConfOpts.emplace_back(ConfigEntry{std::move(fullKey), expdup(value.c_str())});
    }
    ConfOpts.shrink_to_fit();
}
} // namespace
#ifdef _WIN32
/* ReadALConfig (Windows)
 *
 * Loads alsoft.ini files in order of increasing priority: the user's AppData
 * folder, the directory containing the process binary, then a file
 * explicitly named by the ALSOFT_CONF environment variable. Later loads
 * override earlier ones, since LoadConfigFromFile replaces existing entries.
 */
void ReadALConfig()
{
    WCHAR buffer[MAX_PATH];
    if(SHGetSpecialFolderPathW(nullptr, buffer, CSIDL_APPDATA, FALSE) != FALSE)
    {
        std::string filepath{wstr_to_utf8(buffer)};
        filepath += "\\alsoft.ini";

        TRACE("Loading config %s...\n", filepath.c_str());
        al::ifstream f{filepath};
        if(f.is_open())
            LoadConfigFromFile(f);
    }

    std::string ppath{GetProcBinary().path};
    if(!ppath.empty())
    {
        ppath += "\\alsoft.ini";

        TRACE("Loading config %s...\n", ppath.c_str());
        al::ifstream f{ppath};
        if(f.is_open())
            LoadConfigFromFile(f);
    }

    if(auto confpath = al::getenv(L"ALSOFT_CONF"))
    {
        TRACE("Loading config %s...\n", wstr_to_utf8(confpath->c_str()).c_str());
        al::ifstream f{*confpath};
        if(f.is_open())
            LoadConfigFromFile(f);
    }
}
#else
/* ReadALConfig (non-Windows)
 *
 * Loads alsoft.conf files in order of increasing priority:
 * /etc/openal/alsoft.conf, the $XDG_CONFIG_DIRS directories (reverse order),
 * the app bundle's .alsoftrc resource (macOS only), $HOME/.alsoftrc,
 * $XDG_CONFIG_HOME/alsoft.conf (falling back to $HOME/.config/alsoft.conf),
 * the process binary's directory, then a file explicitly named by the
 * ALSOFT_CONF environment variable. Later loads override earlier ones.
 */
void ReadALConfig()
{
    const char *str{"/etc/openal/alsoft.conf"};

    TRACE("Loading config %s...\n", str);
    al::ifstream f{str};
    if(f.is_open())
        LoadConfigFromFile(f);
    f.close();

    std::string confpaths{al::getenv("XDG_CONFIG_DIRS").value_or("/etc/xdg")};
    /* Go through the list in reverse, since "the order of base directories
     * denotes their importance; the first directory listed is the most
     * important". Ergo, we need to load the settings from the later dirs
     * first so that the settings in the earlier dirs override them.
     */
    std::string fname;
    while(!confpaths.empty())
    {
        /* Split off the last ':'-separated path component. */
        auto next = confpaths.find_last_of(':');
        if(next < confpaths.length())
        {
            fname = confpaths.substr(next+1);
            confpaths.erase(next);
        }
        else
        {
            fname = confpaths;
            confpaths.clear();
        }

        /* XDG spec requires absolute paths; ignore anything else. */
        if(fname.empty() || fname.front() != '/')
            WARN("Ignoring XDG config dir: %s\n", fname.c_str());
        else
        {
            if(fname.back() != '/') fname += "/alsoft.conf";
            else fname += "alsoft.conf";

            TRACE("Loading config %s...\n", fname.c_str());
            f = al::ifstream{fname};
            if(f.is_open())
                LoadConfigFromFile(f);
        }
        fname.clear();
    }

#ifdef __APPLE__
    CFBundleRef mainBundle = CFBundleGetMainBundle();
    if(mainBundle)
    {
        unsigned char fileName[PATH_MAX];
        CFURLRef configURL;

        if((configURL=CFBundleCopyResourceURL(mainBundle, CFSTR(".alsoftrc"), CFSTR(""), nullptr)) &&
            CFURLGetFileSystemRepresentation(configURL, true, fileName, sizeof(fileName)))
        {
            f = al::ifstream{reinterpret_cast<char*>(fileName)};
            if(f.is_open())
                LoadConfigFromFile(f);
        }
    }
#endif

    if(auto homedir = al::getenv("HOME"))
    {
        fname = *homedir;
        if(fname.back() != '/') fname += "/.alsoftrc";
        else fname += ".alsoftrc";

        TRACE("Loading config %s...\n", fname.c_str());
        f = al::ifstream{fname};
        if(f.is_open())
            LoadConfigFromFile(f);
    }

    /* Prefer $XDG_CONFIG_HOME, falling back to ~/.config per the XDG spec. */
    if(auto configdir = al::getenv("XDG_CONFIG_HOME"))
    {
        fname = *configdir;
        if(fname.back() != '/') fname += "/alsoft.conf";
        else fname += "alsoft.conf";
    }
    else
    {
        fname.clear();
        if(auto homedir = al::getenv("HOME"))
        {
            fname = *homedir;
            if(fname.back() != '/') fname += "/.config/alsoft.conf";
            else fname += ".config/alsoft.conf";
        }
    }
    if(!fname.empty())
    {
        TRACE("Loading config %s...\n", fname.c_str());
        f = al::ifstream{fname};
        if(f.is_open())
            LoadConfigFromFile(f);
    }

    std::string ppath{GetProcBinary().path};
    if(!ppath.empty())
    {
        if(ppath.back() != '/') ppath += "/alsoft.conf";
        else ppath += "alsoft.conf";

        TRACE("Loading config %s...\n", ppath.c_str());
        f = al::ifstream{ppath};
        if(f.is_open())
            LoadConfigFromFile(f);
    }

    if(auto confname = al::getenv("ALSOFT_CONF"))
    {
        TRACE("Loading config %s...\n", confname->c_str());
        f = al::ifstream{*confname};
        if(f.is_open())
            LoadConfigFromFile(f);
    }
}
#endif
/* Looks up a config option as "[block/][device/]key" in ConfOpts, where a
 * "general" block is treated the same as no block. If no device-qualified
 * entry exists, the lookup is retried without the device name. Returns the
 * entry's value, or def when the key is missing or has an empty value.
 */
const char *GetConfigValue(const char *devName, const char *blockName, const char *keyName, const char *def)
{
    if(!keyName)
        return def;

    /* Assemble the full lookup key. */
    std::string key;
    const bool useBlock{blockName && al::strcasecmp(blockName, "general") != 0};
    if(useBlock)
    {
        key = blockName;
        key += '/';
    }
    if(devName)
    {
        key += devName;
        key += '/';
    }
    key += keyName;

    auto entry = std::find_if(ConfOpts.cbegin(), ConfOpts.cend(),
        [&key](const ConfigEntry &item) -> bool
        { return item.key == key; });
    if(entry != ConfOpts.cend())
    {
        TRACE("Found %s = \"%s\"\n", key.c_str(), entry->value.c_str());
        return entry->value.empty() ? def : entry->value.c_str();
    }

    if(devName)
    {
        /* Retry without the device qualifier for a generic match. */
        return GetConfigValue(nullptr, blockName, keyName, def);
    }
    TRACE("Key %s not found\n", key.c_str());
    return def;
}
/* Returns non-zero if the option resolves to a non-empty value. */
int ConfigValueExists(const char *devName, const char *blockName, const char *keyName)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    return value[0] != '\0';
}
/* Returns the option's value as a string, or an empty optional if unset. */
al::optional<std::string> ConfigValueStr(const char *devName, const char *blockName, const char *keyName)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    if(value[0] != '\0')
        return al::make_optional<std::string>(value);
    return al::nullopt;
}
/* Returns the option parsed as a signed int (auto-detected base), or an
 * empty optional if unset.
 */
al::optional<int> ConfigValueInt(const char *devName, const char *blockName, const char *keyName)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    if(value[0] != '\0')
        return al::make_optional(static_cast<int>(std::strtol(value, nullptr, 0)));
    return al::nullopt;
}
/* Returns the option parsed as an unsigned int (auto-detected base), or an
 * empty optional if unset.
 */
al::optional<unsigned int> ConfigValueUInt(const char *devName, const char *blockName, const char *keyName)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    if(value[0] != '\0')
        return al::make_optional(static_cast<unsigned int>(std::strtoul(value, nullptr, 0)));
    return al::nullopt;
}
/* Returns the option parsed as a float, or an empty optional if unset. */
al::optional<float> ConfigValueFloat(const char *devName, const char *blockName, const char *keyName)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    if(value[0] != '\0')
        return al::make_optional(std::strtof(value, nullptr));
    return al::nullopt;
}
/* Returns the option interpreted as a boolean ("true"/"yes"/"on" or a
 * non-zero number), or an empty optional if unset.
 */
al::optional<bool> ConfigValueBool(const char *devName, const char *blockName, const char *keyName)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    if(value[0] == '\0')
        return al::nullopt;

    const bool istrue{al::strcasecmp(value, "on") == 0 || al::strcasecmp(value, "yes") == 0
        || al::strcasecmp(value, "true") == 0 || atoi(value) != 0};
    return al::make_optional(istrue);
}
/* Returns the option interpreted as a boolean ("true"/"yes"/"on" or a
 * non-zero number), or def (normalized to 0/1) if unset.
 */
int GetConfigValueBool(const char *devName, const char *blockName, const char *keyName, int def)
{
    const char *value{GetConfigValue(devName, blockName, keyName, "")};
    if(value[0] == '\0')
        return def != 0;

    return al::strcasecmp(value, "on") == 0 || al::strcasecmp(value, "yes") == 0
        || al::strcasecmp(value, "true") == 0 || atoi(value) != 0;
}

View file

@ -1,17 +1,20 @@
#ifndef ALCONFIG_H
#define ALCONFIG_H
void ReadALConfig(void);
void FreeALConfig(void);
#include <string>
#include "aloptional.h"
void ReadALConfig();
int ConfigValueExists(const char *devName, const char *blockName, const char *keyName);
const char *GetConfigValue(const char *devName, const char *blockName, const char *keyName, const char *def);
int GetConfigValueBool(const char *devName, const char *blockName, const char *keyName, int def);
int ConfigValueStr(const char *devName, const char *blockName, const char *keyName, const char **ret);
int ConfigValueInt(const char *devName, const char *blockName, const char *keyName, int *ret);
int ConfigValueUInt(const char *devName, const char *blockName, const char *keyName, unsigned int *ret);
int ConfigValueFloat(const char *devName, const char *blockName, const char *keyName, float *ret);
int ConfigValueBool(const char *devName, const char *blockName, const char *keyName, int *ret);
al::optional<std::string> ConfigValueStr(const char *devName, const char *blockName, const char *keyName);
al::optional<int> ConfigValueInt(const char *devName, const char *blockName, const char *keyName);
al::optional<unsigned int> ConfigValueUInt(const char *devName, const char *blockName, const char *keyName);
al::optional<float> ConfigValueFloat(const char *devName, const char *blockName, const char *keyName);
al::optional<bool> ConfigValueBool(const char *devName, const char *blockName, const char *keyName);
#endif /* ALCONFIG_H */

View file

@ -0,0 +1,282 @@
#ifndef ALCONTEXT_H
#define ALCONTEXT_H
#include <array>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include "AL/al.h"
#include "AL/alc.h"
#include "al/listener.h"
#include "almalloc.h"
#include "alnumeric.h"
#include "alu.h"
#include "atomic.h"
#include "inprogext.h"
#include "intrusive_ptr.h"
#include "threads.h"
#include "vecmat.h"
#include "vector.h"
struct ALeffectslot;
struct ALsource;
struct EffectSlot;
struct EffectSlotProps;
struct RingBuffer;
struct Voice;
struct VoiceChange;
struct VoicePropsItem;
/* Distance attenuation model for source gain; Default aliases
 * InverseClamped.
 */
enum class DistanceModel : unsigned char {
    Disable,
    Inverse, InverseClamped,
    Linear, LinearClamped,
    Exponent, ExponentClamped,
    Default = InverseClamped
};
/* A flexible-length array of mixing buffer lines (see ALCcontext's
 * mWetBuffers, "wet buffers used by effect slots"). mInUse presumably marks
 * whether a slot currently holds this buffer -- confirm against the effect
 * slot code.
 */
struct WetBuffer {
    bool mInUse;
    al::FlexArray<FloatBufferLine, 16> mBuffer;

    /* count is the number of FloatBufferLines to allocate. */
    WetBuffer(size_t count) : mBuffer{count} { }

    DEF_FAM_NEWDEL(WetBuffer, mBuffer)
};
using WetBufferPtr = std::unique_ptr<WetBuffer>;
/* A set of context property values, chained through the atomic next pointer
 * (see ALCcontext::mFreeContextProps / ContextParams::ContextUpdate).
 */
struct ContextProps {
    float DopplerFactor;
    float DopplerVelocity;
    float SpeedOfSound;
    bool SourceDistanceModel;
    DistanceModel mDistanceModel;

    std::atomic<ContextProps*> next;

    DEF_NEWDEL(ContextProps)
};
/* A set of listener property values, chained through the atomic next pointer
 * (see ALCcontext::mFreeListenerProps / ContextParams::ListenerUpdate).
 */
struct ListenerProps {
    std::array<float,3> Position;
    std::array<float,3> Velocity;
    std::array<float,3> OrientAt;
    std::array<float,3> OrientUp;
    float Gain;
    float MetersPerUnit;

    std::atomic<ListenerProps*> next;

    DEF_NEWDEL(ListenerProps)
};
/* Context/listener parameter values held by an ALCcontext (mParams). */
struct ContextParams {
    /* Pointer to the most recent property values that are awaiting an update. */
    std::atomic<ContextProps*> ContextUpdate{nullptr};
    std::atomic<ListenerProps*> ListenerUpdate{nullptr};

    alu::Matrix Matrix{alu::Matrix::Identity()};
    alu::Vector Velocity{};

    float Gain{1.0f};
    float MetersPerUnit{1.0f};

    float DopplerFactor{1.0f};
    float SpeedOfSound{343.3f}; /* in units per sec! */

    bool SourceDistanceModel{false};
    DistanceModel mDistanceModel{};
};
/* A 64-entry sub-allocation of ALsource objects. FreeMask tracks unused
 * slots in the Sources array (all bits set with a null array initially);
 * move operations transfer ownership and leave the source empty.
 */
struct SourceSubList {
    uint64_t FreeMask{~0_u64};
    ALsource *Sources{nullptr}; /* 64 */

    SourceSubList() noexcept = default;
    SourceSubList(const SourceSubList&) = delete;
    SourceSubList(SourceSubList&& rhs) noexcept : FreeMask{rhs.FreeMask}, Sources{rhs.Sources}
    { rhs.FreeMask = ~0_u64; rhs.Sources = nullptr; }
    ~SourceSubList();

    SourceSubList& operator=(const SourceSubList&) = delete;
    SourceSubList& operator=(SourceSubList&& rhs) noexcept
    { std::swap(FreeMask, rhs.FreeMask); std::swap(Sources, rhs.Sources); return *this; }
};
/* A 64-entry sub-allocation of ALeffectslot objects, mirroring
 * SourceSubList: FreeMask tracks unused slots in the EffectSlots array.
 */
struct EffectSlotSubList {
    uint64_t FreeMask{~0_u64};
    ALeffectslot *EffectSlots{nullptr}; /* 64 */

    EffectSlotSubList() noexcept = default;
    EffectSlotSubList(const EffectSlotSubList&) = delete;
    EffectSlotSubList(EffectSlotSubList&& rhs) noexcept
        : FreeMask{rhs.FreeMask}, EffectSlots{rhs.EffectSlots}
    { rhs.FreeMask = ~0_u64; rhs.EffectSlots = nullptr; }
    ~EffectSlotSubList();

    EffectSlotSubList& operator=(const EffectSlotSubList&) = delete;
    EffectSlotSubList& operator=(EffectSlotSubList&& rhs) noexcept
    { std::swap(FreeMask, rhs.FreeMask); std::swap(EffectSlots, rhs.EffectSlots); return *this; }
};
/* An ALC rendering context: reference-counted, owned by a device, holding
 * the listener, sources, effect slots, voices, and the state shared with the
 * mixer thread.
 */
struct ALCcontext : public al::intrusive_ref<ALCcontext> {
    /* The device this context was created on (immutable for its lifetime). */
    const al::intrusive_ptr<ALCdevice> mDevice;

    /* Counter for the pre-mixing updates, in 31.1 fixed point (lowest bit
     * indicates if updates are currently happening).
     */
    RefCount mUpdateCount{0u};
    std::atomic<bool> mHoldUpdates{false};

    float mGainBoost{1.0f};

    /* Linked lists of unused property containers, free to use for future
     * updates.
     */
    std::atomic<ContextProps*> mFreeContextProps{nullptr};
    std::atomic<ListenerProps*> mFreeListenerProps{nullptr};
    std::atomic<VoicePropsItem*> mFreeVoiceProps{nullptr};
    std::atomic<EffectSlotProps*> mFreeEffectslotProps{nullptr};

    /* The voice change tail is the beginning of the "free" elements, up to and
     * *excluding* the current. If tail==current, there's no free elements and
     * new ones need to be allocated. The current voice change is the element
     * last processed, and any after are pending.
     */
    VoiceChange *mVoiceChangeTail{};
    std::atomic<VoiceChange*> mCurrentVoiceChange{};

    void allocVoiceChanges(size_t addcount);

    ContextParams mParams;

    using VoiceArray = al::FlexArray<Voice*>;
    std::atomic<VoiceArray*> mVoices{};
    std::atomic<size_t> mActiveVoiceCount{};

    void allocVoices(size_t addcount);

    /* Relaxed loads: for use on the thread that updates these fields. */
    al::span<Voice*> getVoicesSpan() const noexcept
    {
        return {mVoices.load(std::memory_order_relaxed)->data(),
            mActiveVoiceCount.load(std::memory_order_relaxed)};
    }
    /* Acquire loads: for readers synchronizing with another thread's writes. */
    al::span<Voice*> getVoicesSpanAcquired() const noexcept
    {
        return {mVoices.load(std::memory_order_acquire)->data(),
            mActiveVoiceCount.load(std::memory_order_acquire)};
    }

    using EffectSlotArray = al::FlexArray<EffectSlot*>;
    std::atomic<EffectSlotArray*> mActiveAuxSlots{nullptr};

    /* Thread and state for delivering asynchronous events. */
    std::thread mEventThread;
    al::semaphore mEventSem;
    std::unique_ptr<RingBuffer> mAsyncEvents;
    std::atomic<uint> mEnabledEvts{0u};

    /* Asynchronous voice change actions are processed as a linked list of
     * VoiceChange objects by the mixer, which is atomically appended to.
     * However, to avoid allocating each object individually, they're allocated
     * in clusters that are stored in a vector for easy automatic cleanup.
     */
    using VoiceChangeCluster = std::unique_ptr<VoiceChange[]>;
    al::vector<VoiceChangeCluster> mVoiceChangeClusters;

    using VoiceCluster = std::unique_ptr<Voice[]>;
    al::vector<VoiceCluster> mVoiceClusters;

    /* Wet buffers used by effect slots. */
    al::vector<WetBufferPtr> mWetBuffers;

    std::atomic_flag mPropsClean;
    std::atomic<bool> mDeferUpdates{false};

    std::mutex mPropLock;

    std::atomic<ALenum> mLastError{AL_NO_ERROR};

    /* Current context-level property values. */
    DistanceModel mDistanceModel{DistanceModel::Default};
    bool mSourceDistanceModel{false};
    float mDopplerFactor{1.0f};
    float mDopplerVelocity{1.0f};
    float mSpeedOfSound{SpeedOfSoundMetersPerSec};

    /* Application event callback, guarded by mEventCbLock. */
    std::mutex mEventCbLock;
    ALEVENTPROCSOFT mEventCb{};
    void *mEventParam{nullptr};

    ALlistener mListener{};

    /* Allocated sources, in 64-entry sublists (see SourceSubList). */
    al::vector<SourceSubList> mSourceList;
    ALuint mNumSources{0};
    std::mutex mSourceLock;

    /* Allocated effect slots, in 64-entry sublists. */
    al::vector<EffectSlotSubList> mEffectSlotList;
    ALuint mNumEffectSlots{0u};
    std::mutex mEffectSlotLock;

    /* Default effect slot */
    std::unique_ptr<ALeffectslot> mDefaultSlot;

    const char *mExtensionList{nullptr};

    ALCcontext(al::intrusive_ptr<ALCdevice> device);
    ALCcontext(const ALCcontext&) = delete;
    ALCcontext& operator=(const ALCcontext&) = delete;
    ~ALCcontext();

    void init();
    /**
     * Removes the context from its device and removes it from being current on
     * the running thread or globally. Returns true if other contexts still
     * exist on the device.
     */
    bool deinit();

    /**
     * Defers/suspends updates for the given context's listener and sources.
     * This does *NOT* stop mixing, but rather prevents certain property
     * changes from taking effect.
     */
    void deferUpdates() noexcept { mDeferUpdates.exchange(true, std::memory_order_acq_rel); }

    /** Resumes update processing after being deferred. */
    void processUpdates();

    /* Records an AL error for the context and formats a printf-style message. */
    [[gnu::format(printf,3,4)]] void setError(ALenum errorCode, const char *msg, ...);

    DEF_NEWDEL(ALCcontext)
};
#define SETERR_RETURN(ctx, err, retval, ...) do { \
(ctx)->setError((err), __VA_ARGS__); \
return retval; \
} while(0)
using ContextRef = al::intrusive_ptr<ALCcontext>;
ContextRef GetContextRef(void);
void UpdateContextProps(ALCcontext *context);
extern bool TrapALError;
#endif /* ALCONTEXT_H */

View file

@ -1,58 +0,0 @@
#ifndef ALSTRING_H
#define ALSTRING_H
#include <string.h>
#include "vector.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef char al_string_char_type;
TYPEDEF_VECTOR(al_string_char_type, al_string)
TYPEDEF_VECTOR(al_string, vector_al_string)
/* Releases the string's vector storage, leaving it in the empty/NULL state. */
inline void alstr_reset(al_string *str)
{ VECTOR_DEINIT(*str); }
#define AL_STRING_INIT(_x) do { (_x) = (al_string)NULL; } while(0)
#define AL_STRING_INIT_STATIC() ((al_string)NULL)
#define AL_STRING_DEINIT(_x) alstr_reset(&(_x))
/* Returns the string's length (its underlying vector size). */
inline size_t alstr_length(const_al_string str)
{ return VECTOR_SIZE(str); }
/* Returns true if the string has zero length. */
inline ALboolean alstr_empty(const_al_string str)
{ return alstr_length(str) == 0; }
/* Returns a C-string pointer for the string, or "" when str is NULL/empty. */
inline const al_string_char_type *alstr_get_cstr(const_al_string str)
{ return str ? &VECTOR_FRONT(str) : ""; }
void alstr_clear(al_string *str);
int alstr_cmp(const_al_string str1, const_al_string str2);
int alstr_cmp_cstr(const_al_string str1, const al_string_char_type *str2);
void alstr_copy(al_string *str, const_al_string from);
void alstr_copy_cstr(al_string *str, const al_string_char_type *from);
void alstr_copy_range(al_string *str, const al_string_char_type *from, const al_string_char_type *to);
void alstr_append_char(al_string *str, const al_string_char_type c);
void alstr_append_cstr(al_string *str, const al_string_char_type *from);
void alstr_append_range(al_string *str, const al_string_char_type *from, const al_string_char_type *to);
#ifdef _WIN32
#include <wchar.h>
/* Windows-only methods to deal with WideChar strings. */
void alstr_copy_wcstr(al_string *str, const wchar_t *from);
void alstr_append_wcstr(al_string *str, const wchar_t *from);
void alstr_copy_wrange(al_string *str, const wchar_t *from, const wchar_t *to);
void alstr_append_wrange(al_string *str, const wchar_t *from, const wchar_t *to);
#endif
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* ALSTRING_H */

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,141 @@
#ifndef ALU_H
#define ALU_H
#include <array>
#include <cmath>
#include <cstddef>
#include <type_traits>
#include "alspan.h"
#include "core/ambidefs.h"
#include "core/bufferline.h"
#include "core/devformat.h"
struct ALCcontext;
struct ALCdevice;
struct EffectSlot;
struct MixParams;
#define MAX_SENDS 6
using MixerFunc = void(*)(const al::span<const float> InSamples,
const al::span<FloatBufferLine> OutBuffer, float *CurrentGains, const float *TargetGains,
const size_t Counter, const size_t OutPos);
extern MixerFunc MixSamples;
constexpr float GainMixMax{1000.0f}; /* +60dB */
constexpr float SpeedOfSoundMetersPerSec{343.3f};
constexpr float AirAbsorbGainHF{0.99426f}; /* -0.05dB */
/** Target gain for the reverb decay feedback reaching the decay time. */
constexpr float ReverbDecayGain{0.001f}; /* -60 dB */
/* How HRTF rendering was requested (see aluInitRenderer's hrtf_appreq and
 * hrtf_userreq parameters): left to the default, explicitly enabled, or
 * explicitly disabled.
 */
enum HrtfRequestMode {
    Hrtf_Default = 0,
    Hrtf_Enable = 1,
    Hrtf_Disable = 2,
};
void aluInit(void);
void aluInitMixer(void);
/* aluInitRenderer
*
* Set up the appropriate panning method and mixing method given the device
* properties.
*/
void aluInitRenderer(ALCdevice *device, int hrtf_id, HrtfRequestMode hrtf_appreq,
HrtfRequestMode hrtf_userreq);
void aluInitEffectPanning(EffectSlot *slot, ALCcontext *context);
/**
* Calculates ambisonic encoder coefficients using the X, Y, and Z direction
* components, which must represent a normalized (unit length) vector, and the
* spread is the angular width of the sound (0...tau).
*
* NOTE: The components use ambisonic coordinates. As a result:
*
* Ambisonic Y = OpenAL -X
* Ambisonic Z = OpenAL Y
* Ambisonic X = OpenAL -Z
*
* The components are ordered such that OpenAL's X, Y, and Z are the first,
* second, and third parameters respectively -- simply negate X and Z.
*/
std::array<float,MaxAmbiChannels> CalcAmbiCoeffs(const float y, const float z, const float x,
const float spread);
/**
* CalcDirectionCoeffs
*
* Calculates ambisonic coefficients based on an OpenAL direction vector. The
* vector must be normalized (unit length), and the spread is the angular width
* of the sound (0...tau).
*/
inline std::array<float,MaxAmbiChannels> CalcDirectionCoeffs(const float (&dir)[3],
    const float spread)
{
    /* OpenAL -> Ambisonic coordinates: negate X and Z, pass Y through. */
    const float ambi_y{-dir[0]};
    const float ambi_z{ dir[1]};
    const float ambi_x{-dir[2]};
    return CalcAmbiCoeffs(ambi_y, ambi_z, ambi_x, spread);
}
/**
* CalcAngleCoeffs
*
* Calculates ambisonic coefficients based on azimuth and elevation. The
* azimuth and elevation parameters are in radians, going right and up
* respectively.
*/
inline std::array<float,MaxAmbiChannels> CalcAngleCoeffs(const float azimuth,
    const float elevation, const float spread)
{
    /* Spherical angles -> Cartesian OpenAL direction, then encode. */
    const float cos_ev{std::cos(elevation)};
    const float x{-std::sin(azimuth) * cos_ev};
    const float y{ std::sin(elevation)};
    const float z{ std::cos(azimuth) * cos_ev};
    return CalcAmbiCoeffs(x, y, z, spread);
}
/**
* ComputePanGains
*
* Computes panning gains using the given channel decoder coefficients and the
* pre-calculated direction or angle coefficients. For B-Format sources, the
* coeffs are a 'slice' of a transform matrix for the input channel, used to
* scale and orient the sound samples.
*/
void ComputePanGains(const MixParams *mix, const float*RESTRICT coeffs, const float ingain,
const al::span<float,MAX_OUTPUT_CHANNELS> gains);
/** Helper to set an identity/pass-through panning for ambisonic mixing (3D input). */
template<typename T, typename I, typename F>
auto SetAmbiPanIdentity(T iter, I count, F func) -> std::enable_if_t<std::is_integral<I>::value>
{
if(count < 1) return;
std::array<float,MaxAmbiChannels> coeffs{{1.0f}};
func(*iter, coeffs);
++iter;
for(I i{1};i < count;++i,++iter)
{
coeffs[i-1] = 0.0f;
coeffs[i ] = 1.0f;
func(*iter, coeffs);
}
}
extern const float ConeScale;
extern const float ZScale;
#endif

View file

@ -1,566 +0,0 @@
#include "config.h"
#include "ambdec.h"
#include <stdio.h>
#include <string.h>
#include <ctype.h>
#include "compat.h"
/* Returns a pointer past any leading whitespace in line.
 *
 * The cast to unsigned char is required: passing a plain char (which may be
 * negative for high-bit characters on signed-char platforms) to isspace() is
 * undefined behavior.
 */
static char *lstrip(char *line)
{
    while(isspace((unsigned char)line[0]))
        line++;
    return line;
}
/* Truncates any trailing whitespace from line in place and returns it.
 *
 * The cast to unsigned char avoids undefined behavior when isspace() is
 * given a negative char value.
 */
static char *rstrip(char *line)
{
    size_t len = strlen(line);
    while(len > 0 && isspace((unsigned char)line[len-1]))
        len--;
    line[len] = 0;
    return line;
}
/* Reads one line from f into *output, a heap buffer of *maxlen bytes that is
 * grown with realloc as needed (both are updated in place). Leading CR/LF
 * characters are skipped and the result is kept NUL-terminated, without the
 * line terminator. Returns 1 on success, 0 on end-of-file or allocation
 * failure.
 */
static int readline(FILE *f, char **output, size_t *maxlen)
{
    size_t len = 0;
    int c;

    /* Skip any newline/carriage-return characters left from the previous
     * line.
     */
    while((c=fgetc(f)) != EOF && (c == '\r' || c == '\n'))
        ;
    if(c == EOF)
        return 0;

    do {
        if(len+1 >= *maxlen)
        {
            void *temp = NULL;
            size_t newmax;

            /* Double the capacity (starting at 32); the newmax > *maxlen
             * check guards against size overflow from the shift.
             */
            newmax = (*maxlen ? (*maxlen)<<1 : 32);
            if(newmax > *maxlen)
                temp = realloc(*output, newmax);
            if(!temp)
            {
                ERR("Failed to realloc "SZFMT" bytes from "SZFMT"!\n", newmax, *maxlen);
                return 0;
            }

            *output = temp;
            *maxlen = newmax;
        }
        (*output)[len++] = c;
        (*output)[len] = '\0';
    } while((c=fgetc(f)) != EOF && c != '\r' && c != '\n');

    return 1;
}
/* Custom strtok_r, since we can't rely on it existing. Splits str (or the
 * saved position when str is NULL) on any of the delim characters, returning
 * the next token or NULL when none remain.
 */
static char *my_strtok_r(char *str, const char *delim, char **saveptr)
{
    char *tok_start;
    char *tok_end;

    /* Sanity check and update internal pointer. */
    if(!saveptr || !delim)
        return NULL;
    if(str)
        *saveptr = str;

    tok_start = *saveptr;
    if(!tok_start)
        return NULL;

    /* Skip leading delimiters to find the token start. */
    tok_start += strspn(tok_start, delim);
    if(*tok_start == '\0')
    {
        /* Reached the end of the string with no token. */
        *saveptr = NULL;
        return NULL;
    }

    /* Terminate the token at the next delimiter, if there is one, and save
     * the position just past it for the next call.
     */
    tok_end = strpbrk(tok_start, delim);
    if(tok_end)
        *(tok_end++) = '\0';
    *saveptr = tok_end;
    return tok_start;
}
/* Parses a signed integer (in the given base) from line into *num, returning
 * a pointer to the remaining text after any following whitespace.
 */
static char *read_int(ALint *num, const char *line, int base)
{
    char *end = NULL;
    *num = strtol(line, &end, base);
    if(end && *end != '\0')
        return lstrip(end);
    return end;
}
/* Parses an unsigned integer (in the given base) from line into *num,
 * returning a pointer to the remaining text after any following whitespace.
 */
static char *read_uint(ALuint *num, const char *line, int base)
{
    char *end = NULL;
    *num = strtoul(line, &end, base);
    if(end && *end != '\0')
        return lstrip(end);
    return end;
}
/* Parses a float from line into *num, returning a pointer to the remaining
 * text after any following whitespace. Falls back to strtod when strtof
 * isn't available.
 */
static char *read_float(ALfloat *num, const char *line)
{
    char *end;

#ifdef HAVE_STRTOF
    *num = strtof(line, &end);
#else
    *num = (ALfloat)strtod(line, &end);
#endif
    if(end && *end != '\0')
        end = lstrip(end);

    return end;
}
/* Reads lines from f (via readline, growing *buffer/*maxlen as needed) until
 * one is non-empty after removing a '#' comment and surrounding whitespace.
 * Returns a pointer into *buffer at the cleaned line, or NULL at EOF.
 */
char *read_clipped_line(FILE *f, char **buffer, size_t *maxlen)
{
    while(readline(f, buffer, maxlen))
    {
        char *line = lstrip(*buffer);
        /* Cut the line at a comment marker, if present. */
        char *comment = strchr(line, '#');
        if(comment)
            *comment = 0;

        line = rstrip(line);
        if(line[0] != '\0')
            return line;
    }
    return NULL;
}
/* Parses the speaker definition section of an ambdec file: a sequence of
 * "add_spkr <name> <dist> <az> <elev> [conn]" commands filling
 * conf->Speakers, reading further lines from f as needed. Returns 1 on
 * success, 0 on a parse error.
 */
static int load_ambdec_speakers(AmbDecConf *conf, FILE *f, char **buffer, size_t *maxlen, char **saveptr)
{
    ALsizei cur = 0;
    while(cur < conf->NumSpeakers)
    {
        /* Continue tokenizing the current line, or fetch a new one. */
        const char *cmd = my_strtok_r(NULL, " \t", saveptr);
        if(!cmd)
        {
            char *line = read_clipped_line(f, buffer, maxlen);
            if(!line)
            {
                ERR("Unexpected end of file\n");
                return 0;
            }
            cmd = my_strtok_r(line, " \t", saveptr);
        }

        if(strcmp(cmd, "add_spkr") == 0)
        {
            const char *name = my_strtok_r(NULL, " \t", saveptr);
            const char *dist = my_strtok_r(NULL, " \t", saveptr);
            const char *az = my_strtok_r(NULL, " \t", saveptr);
            const char *elev = my_strtok_r(NULL, " \t", saveptr);
            const char *conn = my_strtok_r(NULL, " \t", saveptr);

            /* Missing fields are warnings, not fatal errors. */
            if(!name) WARN("Name not specified for speaker %u\n", cur+1);
            else alstr_copy_cstr(&conf->Speakers[cur].Name, name);
            if(!dist) WARN("Distance not specified for speaker %u\n", cur+1);
            else read_float(&conf->Speakers[cur].Distance, dist);
            if(!az) WARN("Azimuth not specified for speaker %u\n", cur+1);
            else read_float(&conf->Speakers[cur].Azimuth, az);
            if(!elev) WARN("Elevation not specified for speaker %u\n", cur+1);
            else read_float(&conf->Speakers[cur].Elevation, elev);
            if(!conn) TRACE("Connection not specified for speaker %u\n", cur+1);
            else alstr_copy_cstr(&conf->Speakers[cur].Connection, conn);

            cur++;
        }
        else
        {
            ERR("Unexpected speakers command: %s\n", cmd);
            return 0;
        }

        /* Nothing else is allowed after a complete command. */
        cmd = my_strtok_r(NULL, " \t", saveptr);
        if(cmd)
        {
            ERR("Unexpected junk on line: %s\n", cmd);
            return 0;
        }
    }
    return 1;
}
/* Parses a decoder matrix section of an ambdec file: "order_gain" supplies
 * the per-order gains array, and each "add_row" fills the next matrix row
 * (up to maxrow rows). Unspecified gains/coefficients are zeroed. Returns 1
 * on success, 0 on a parse error or if order_gain was never given.
 */
static int load_ambdec_matrix(ALfloat *gains, ALfloat (*matrix)[MAX_AMBI_COEFFS], ALsizei maxrow, FILE *f, char **buffer, size_t *maxlen, char **saveptr)
{
    int gotgains = 0;
    ALsizei cur = 0;
    while(cur < maxrow)
    {
        /* Continue tokenizing the current line, or fetch a new one. */
        const char *cmd = my_strtok_r(NULL, " \t", saveptr);
        if(!cmd)
        {
            char *line = read_clipped_line(f, buffer, maxlen);
            if(!line)
            {
                ERR("Unexpected end of file\n");
                return 0;
            }
            cmd = my_strtok_r(line, " \t", saveptr);
        }

        if(strcmp(cmd, "order_gain") == 0)
        {
            ALuint curgain = 0;
            char *line;
            while((line=my_strtok_r(NULL, " \t", saveptr)) != NULL)
            {
                ALfloat value;
                line = read_float(&value, line);
                if(line && *line != '\0')
                {
                    ERR("Extra junk on gain %u: %s\n", curgain+1, line);
                    return 0;
                }
                /* Extra gains beyond the supported order are ignored. */
                if(curgain < MAX_AMBI_ORDER+1)
                    gains[curgain] = value;
                curgain++;
            }
            /* Zero any orders that weren't given a gain. */
            while(curgain < MAX_AMBI_ORDER+1)
                gains[curgain++] = 0.0f;
            gotgains = 1;
        }
        else if(strcmp(cmd, "add_row") == 0)
        {
            ALuint curidx = 0;
            char *line;
            while((line=my_strtok_r(NULL, " \t", saveptr)) != NULL)
            {
                ALfloat value;
                line = read_float(&value, line);
                if(line && *line != '\0')
                {
                    ERR("Extra junk on matrix element %ux%u: %s\n", cur, curidx, line);
                    return 0;
                }
                /* Extra coefficients beyond the supported count are ignored. */
                if(curidx < MAX_AMBI_COEFFS)
                    matrix[cur][curidx] = value;
                curidx++;
            }
            /* Zero the rest of the row. */
            while(curidx < MAX_AMBI_COEFFS)
                matrix[cur][curidx++] = 0.0f;
            cur++;
        }
        else
        {
            ERR("Unexpected speakers command: %s\n", cmd);
            return 0;
        }

        /* Nothing else is allowed after a complete command. */
        cmd = my_strtok_r(NULL, " \t", saveptr);
        if(cmd)
        {
            ERR("Unexpected junk on line: %s\n", cmd);
            return 0;
        }
    }
    if(!gotgains)
    {
        ERR("Matrix order_gain not specified\n");
        return 0;
    }
    return 1;
}
/* Zero-initializes an AmbDecConf and puts its string members into their
 * initial (empty) state, ready for ambdec_load.
 */
void ambdec_init(AmbDecConf *conf)
{
    ALsizei i;

    memset(conf, 0, sizeof(*conf));
    AL_STRING_INIT(conf->Description);
    for(i = 0;i < MAX_OUTPUT_CHANNELS;i++)
    {
        AL_STRING_INIT(conf->Speakers[i].Name);
        AL_STRING_INIT(conf->Speakers[i].Connection);
    }
}
/* Frees an AmbDecConf's string members and clears the whole structure. */
void ambdec_deinit(AmbDecConf *conf)
{
    ALsizei i;

    alstr_reset(&conf->Description);
    for(i = 0;i < MAX_OUTPUT_CHANNELS;i++)
    {
        alstr_reset(&conf->Speakers[i].Name);
        alstr_reset(&conf->Speakers[i].Connection);
    }
    memset(conf, 0, sizeof(*conf));
}
/* Loads an AmbDec (.ambdec) decoder configuration file into conf. Parsing
 * ends at the "end" command; any malformed or unknown command aborts.
 * Returns 1 on success, 0 on failure (unopenable file, parse error, or EOF
 * before "end"). On failure, partially-parsed fields may already have been
 * written to conf.
 */
int ambdec_load(AmbDecConf *conf, const char *fname)
{
    char *buffer = NULL;
    size_t maxlen = 0;
    char *line;
    FILE *f;

    f = al_fopen(fname, "r");
    if(!f)
    {
        ERR("Failed to open: %s\n", fname);
        return 0;
    }

    while((line=read_clipped_line(f, &buffer, &maxlen)) != NULL)
    {
        char *saveptr;
        char *command;

        /* Commands are split on '/' as well as whitespace, so "/dec/..."
         * style paths tokenize into their components.
         */
        command = my_strtok_r(line, "/ \t", &saveptr);
        if(!command)
        {
            ERR("Malformed line: %s\n", line);
            goto fail;
        }

        if(strcmp(command, "description") == 0)
        {
            /* The rest of the line (empty delimiter set) is the description. */
            char *value = my_strtok_r(NULL, "", &saveptr);
            alstr_copy_cstr(&conf->Description, lstrip(value));
        }
        else if(strcmp(command, "version") == 0)
        {
            line = my_strtok_r(NULL, "", &saveptr);
            line = read_uint(&conf->Version, line, 10);
            if(line && *line != '\0')
            {
                ERR("Extra junk after version: %s\n", line);
                goto fail;
            }
            if(conf->Version != 3)
            {
                ERR("Unsupported version: %u\n", conf->Version);
                goto fail;
            }
        }
        else if(strcmp(command, "dec") == 0)
        {
            /* Decoder sub-options. */
            const char *dec = my_strtok_r(NULL, "/ \t", &saveptr);
            if(strcmp(dec, "chan_mask") == 0)
            {
                line = my_strtok_r(NULL, "", &saveptr);
                line = read_uint(&conf->ChanMask, line, 16);
                if(line && *line != '\0')
                {
                    ERR("Extra junk after mask: %s\n", line);
                    goto fail;
                }
            }
            else if(strcmp(dec, "freq_bands") == 0)
            {
                line = my_strtok_r(NULL, "", &saveptr);
                line = read_uint(&conf->FreqBands, line, 10);
                if(line && *line != '\0')
                {
                    ERR("Extra junk after freq_bands: %s\n", line);
                    goto fail;
                }
                if(conf->FreqBands != 1 && conf->FreqBands != 2)
                {
                    ERR("Invalid freq_bands value: %u\n", conf->FreqBands);
                    goto fail;
                }
            }
            else if(strcmp(dec, "speakers") == 0)
            {
                line = my_strtok_r(NULL, "", &saveptr);
                line = read_int(&conf->NumSpeakers, line, 10);
                if(line && *line != '\0')
                {
                    ERR("Extra junk after speakers: %s\n", line);
                    goto fail;
                }
                if(conf->NumSpeakers > MAX_OUTPUT_CHANNELS)
                {
                    ERR("Unsupported speaker count: %u\n", conf->NumSpeakers);
                    goto fail;
                }
            }
            else if(strcmp(dec, "coeff_scale") == 0)
            {
                line = my_strtok_r(NULL, " \t", &saveptr);
                if(strcmp(line, "n3d") == 0)
                    conf->CoeffScale = ADS_N3D;
                else if(strcmp(line, "sn3d") == 0)
                    conf->CoeffScale = ADS_SN3D;
                else if(strcmp(line, "fuma") == 0)
                    conf->CoeffScale = ADS_FuMa;
                else
                {
                    ERR("Unsupported coeff scale: %s\n", line);
                    goto fail;
                }
            }
            else
            {
                ERR("Unexpected /dec option: %s\n", dec);
                goto fail;
            }
        }
        else if(strcmp(command, "opt") == 0)
        {
            /* Optional settings. */
            const char *opt = my_strtok_r(NULL, "/ \t", &saveptr);
            if(strcmp(opt, "xover_freq") == 0)
            {
                line = my_strtok_r(NULL, "", &saveptr);
                line = read_float(&conf->XOverFreq, line);
                if(line && *line != '\0')
                {
                    ERR("Extra junk after xover_freq: %s\n", line);
                    goto fail;
                }
            }
            else if(strcmp(opt, "xover_ratio") == 0)
            {
                line = my_strtok_r(NULL, "", &saveptr);
                line = read_float(&conf->XOverRatio, line);
                if(line && *line != '\0')
                {
                    ERR("Extra junk after xover_ratio: %s\n", line);
                    goto fail;
                }
            }
            else if(strcmp(opt, "input_scale") == 0 || strcmp(opt, "nfeff_comp") == 0 ||
                    strcmp(opt, "delay_comp") == 0 || strcmp(opt, "level_comp") == 0)
            {
                /* Unused */
                my_strtok_r(NULL, " \t", &saveptr);
            }
            else
            {
                ERR("Unexpected /opt option: %s\n", opt);
                goto fail;
            }
        }
        else if(strcmp(command, "speakers") == 0)
        {
            /* A brace-delimited block of speaker definitions. */
            const char *value = my_strtok_r(NULL, "/ \t", &saveptr);
            if(strcmp(value, "{") != 0)
            {
                ERR("Expected { after %s command, got %s\n", command, value);
                goto fail;
            }
            if(!load_ambdec_speakers(conf, f, &buffer, &maxlen, &saveptr))
                goto fail;
            value = my_strtok_r(NULL, "/ \t", &saveptr);
            if(!value)
            {
                line = read_clipped_line(f, &buffer, &maxlen);
                if(!line)
                {
                    ERR("Unexpected end of file\n");
                    goto fail;
                }
                value = my_strtok_r(line, "/ \t", &saveptr);
            }
            if(strcmp(value, "}") != 0)
            {
                ERR("Expected } after speaker definitions, got %s\n", value);
                goto fail;
            }
        }
        else if(strcmp(command, "lfmatrix") == 0 || strcmp(command, "hfmatrix") == 0 ||
                strcmp(command, "matrix") == 0)
        {
            /* A brace-delimited decoder matrix block. Single-band decoders
             * take one "matrix"; dual-band decoders take "lfmatrix" and
             * "hfmatrix".
             */
            const char *value = my_strtok_r(NULL, "/ \t", &saveptr);
            if(strcmp(value, "{") != 0)
            {
                ERR("Expected { after %s command, got %s\n", command, value);
                goto fail;
            }
            if(conf->FreqBands == 1)
            {
                if(strcmp(command, "matrix") != 0)
                {
                    ERR("Unexpected \"%s\" type for a single-band decoder\n", command);
                    goto fail;
                }
                if(!load_ambdec_matrix(conf->HFOrderGain, conf->HFMatrix, conf->NumSpeakers,
                                       f, &buffer, &maxlen, &saveptr))
                    goto fail;
            }
            else
            {
                if(strcmp(command, "lfmatrix") == 0)
                {
                    if(!load_ambdec_matrix(conf->LFOrderGain, conf->LFMatrix, conf->NumSpeakers,
                                           f, &buffer, &maxlen, &saveptr))
                        goto fail;
                }
                else if(strcmp(command, "hfmatrix") == 0)
                {
                    if(!load_ambdec_matrix(conf->HFOrderGain, conf->HFMatrix, conf->NumSpeakers,
                                           f, &buffer, &maxlen, &saveptr))
                        goto fail;
                }
                else
                {
                    ERR("Unexpected \"%s\" type for a dual-band decoder\n", command);
                    goto fail;
                }
            }
            value = my_strtok_r(NULL, "/ \t", &saveptr);
            if(!value)
            {
                line = read_clipped_line(f, &buffer, &maxlen);
                if(!line)
                {
                    ERR("Unexpected end of file\n");
                    goto fail;
                }
                value = my_strtok_r(line, "/ \t", &saveptr);
            }
            if(strcmp(value, "}") != 0)
            {
                ERR("Expected } after matrix definitions, got %s\n", value);
                goto fail;
            }
        }
        else if(strcmp(command, "end") == 0)
        {
            /* Successful end of the config; anything after "end" is junk. */
            line = my_strtok_r(NULL, "/ \t", &saveptr);
            if(line)
            {
                ERR("Unexpected junk on end: %s\n", line);
                goto fail;
            }

            fclose(f);
            free(buffer);
            return 1;
        }
        else
        {
            ERR("Unexpected command: %s\n", command);
            goto fail;
        }

        line = my_strtok_r(NULL, "/ \t", &saveptr);
        if(line)
        {
            ERR("Unexpected junk on line: %s\n", line);
            goto fail;
        }
    }
    ERR("Unexpected end of file\n");

fail:
    fclose(f);
    free(buffer);
    return 0;
}

View file

@ -1,46 +0,0 @@
#ifndef AMBDEC_H
#define AMBDEC_H

#include "alstring.h"
#include "alMain.h"

/* Helpers to read .ambdec configuration files. */

/* Coefficient normalization/scaling convention declared by the preset. */
enum AmbDecScaleType {
    ADS_N3D,
    ADS_SN3D,
    ADS_FuMa,
};

/* Parsed contents of an .ambdec decoder preset file. */
typedef struct AmbDecConf {
    al_string Description;
    ALuint Version; /* Must be 3 */

    /* NOTE(review): presumably a bitmask of the ambisonic channels the
     * matrices use — confirm against the loader. */
    ALuint ChanMask;
    ALuint FreqBands; /* Must be 1 or 2 */
    ALsizei NumSpeakers; /* Number of valid entries in Speakers[]. */
    enum AmbDecScaleType CoeffScale;

    /* Band-split parameters, used by dual-band (FreqBands == 2) decoders. */
    ALfloat XOverFreq;
    ALfloat XOverRatio;

    /* Per-speaker placement, as read from the preset's speakers section. */
    struct {
        al_string Name;
        ALfloat Distance;
        ALfloat Azimuth;
        ALfloat Elevation;
        al_string Connection;
    } Speakers[MAX_OUTPUT_CHANNELS];

    /* Unused when FreqBands == 1 */
    ALfloat LFOrderGain[MAX_AMBI_ORDER+1];
    ALfloat LFMatrix[MAX_OUTPUT_CHANNELS][MAX_AMBI_COEFFS];

    /* For single-band decoders, the "matrix" section is stored here. */
    ALfloat HFOrderGain[MAX_AMBI_ORDER+1];
    ALfloat HFMatrix[MAX_OUTPUT_CHANNELS][MAX_AMBI_COEFFS];
} AmbDecConf;

/* Initializes a config object's members. */
void ambdec_init(AmbDecConf *conf);
/* Releases resources held by a config object. */
void ambdec_deinit(AmbDecConf *conf);
/* Loads the named .ambdec file into *conf; returns 1 on success, 0 on failure. */
int ambdec_load(AmbDecConf *conf, const char *fname);

#endif /* AMBDEC_H */

View file

@ -0,0 +1,49 @@
#ifndef ALC_EVENT_H
#define ALC_EVENT_H

#include "almalloc.h"

struct EffectState;
enum class VChangeState;

using uint = unsigned int;

enum {
    /* End event thread processing. */
    EventType_KillThread = 0,

    /* User event types. */
    EventType_SourceStateChange = 1<<0,
    EventType_BufferCompleted = 1<<1,
    EventType_Disconnected = 1<<2,

    /* Internal events. */
    EventType_ReleaseEffectState = 65536,
};

/* An asynchronous event message. EnumType selects which union member is
 * active; the default-constructed event has EnumType 0.
 */
struct AsyncEvent {
    uint EnumType{0u};
    union {
        char dummy;
        /* EventType_SourceStateChange payload. */
        struct {
            uint id;
            VChangeState state;
        } srcstate;
        /* EventType_BufferCompleted payload. */
        struct {
            uint id;
            uint count;
        } bufcomp;
        /* EventType_Disconnected payload: a human-readable message. */
        struct {
            char msg[244];
        } disconnect;
        /* EventType_ReleaseEffectState payload. */
        EffectState *mEffectState;
    } u{};

    AsyncEvent() noexcept = default;
    /* NOTE: intentionally implicit — events are constructed from the
     * EventType_* enumerators. */
    constexpr AsyncEvent(uint type) noexcept : EnumType{type} { }

    DISABLE_ALLOC()
};

#endif

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_ALSA_H
#define BACKENDS_ALSA_H
#include "backends/base.h"
struct AlsaBackendFactory final : public BackendFactory {
public:
bool init() override;
bool querySupport(BackendType type) override;
std::string probe(BackendType type) override;
BackendPtr createBackend(ALCdevice *device, BackendType type) override;
static BackendFactory &getFactory();
};
#endif /* BACKENDS_ALSA_H */

View file

@ -1,83 +0,0 @@
#include "config.h"
#include <stdlib.h>
#include "alMain.h"
#include "alu.h"
#include "backends/base.h"
/* Emit out-of-line definitions for these header-declared inline functions. */
extern inline ALuint64 GetDeviceClockTime(ALCdevice *device);
extern inline void ALCdevice_Lock(ALCdevice *device);
extern inline void ALCdevice_Unlock(ALCdevice *device);
/* Base ALCbackend method implementations. */
/* Base constructor: creates the backend's recursive mutex and records the
 * device it belongs to. Called by every derived backend's constructor.
 */
void ALCbackend_Construct(ALCbackend *self, ALCdevice *device)
{
    int ret = almtx_init(&self->mMutex, almtx_recursive);
    assert(ret == althrd_success);
    (void)ret; /* Silence unused-variable warnings when assert() is compiled out (NDEBUG). */
    self->mDevice = device;
}
/* Base destructor: releases the mutex created in ALCbackend_Construct. */
void ALCbackend_Destruct(ALCbackend *self)
{
    almtx_destroy(&self->mMutex);
}
/* Default reset handler: backends without a reset path report failure. */
ALCboolean ALCbackend_reset(ALCbackend* UNUSED(self))
{
    return ALC_FALSE;
}
/* Default capture handler: playback-only backends cannot capture samples. */
ALCenum ALCbackend_captureSamples(ALCbackend* UNUSED(self), void* UNUSED(buffer), ALCuint UNUSED(samples))
{
    return ALC_INVALID_DEVICE;
}
/* Default: playback-only backends never have captured samples available. */
ALCuint ALCbackend_availableSamples(ALCbackend* UNUSED(self))
{
    return 0;
}
/* Default clock/latency query. Reads the device clock with a seqlock-style
 * loop: MixCount is odd while the mixer is mid-update, so spin until it is
 * even, take the reading, and retry if the count changed underneath us.
 */
ClockLatency ALCbackend_getClockLatency(ALCbackend *self)
{
    ALCdevice *device = self->mDevice;
    ALuint refcount;
    ClockLatency ret;

    do {
        while(((refcount=ATOMIC_LOAD(&device->MixCount, almemory_order_acquire))&1))
            althrd_yield();
        ret.ClockTime = GetDeviceClockTime(device);
        ATOMIC_THREAD_FENCE(almemory_order_acquire);
    } while(refcount != ATOMIC_LOAD(&device->MixCount, almemory_order_relaxed));

    /* NOTE: The device will generally have about all but one periods filled at
     * any given time during playback. Without a more accurate measurement from
     * the output, this is an okay approximation.
     */
    ret.Latency = device->UpdateSize * DEVICE_CLOCK_RES / device->Frequency *
                  maxu(device->NumUpdates-1, 1);

    return ret;
}
/* Locks the backend's recursive mutex (see ALCbackend_Construct). */
void ALCbackend_lock(ALCbackend *self)
{
    int ret = almtx_lock(&self->mMutex);
    assert(ret == althrd_success);
    (void)ret; /* Unused when assert() is compiled out (NDEBUG). */
}
/* Unlocks the backend's recursive mutex. */
void ALCbackend_unlock(ALCbackend *self)
{
    int ret = almtx_unlock(&self->mMutex);
    assert(ret == althrd_success);
    (void)ret; /* Unused when assert() is compiled out (NDEBUG). */
}
/* Base ALCbackendFactory method implementations. */
/* Default factory deinit: nothing to clean up. */
void ALCbackendFactory_deinit(ALCbackendFactory* UNUSED(self))
{
}

View file

@ -0,0 +1,195 @@
#include "config.h"
#include "base.h"
#include <atomic>
#include <thread>
#ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <mmreg.h>
#endif
#include "albit.h"
#include "alcmain.h"
#include "alnumeric.h"
#include "aloptional.h"
#include "atomic.h"
#include "core/logging.h"
/* Default reset: backends that don't support resetting raise a device error. */
bool BackendBase::reset()
{ throw al::backend_exception{al::backend_error::DeviceError, "Invalid BackendBase call"}; }
/* Default capture: playback-only backends produce no samples. */
void BackendBase::captureSamples(al::byte*, uint)
{ }
/* Default: no captured samples are ever available. */
uint BackendBase::availableSamples()
{ return 0; }
/* Default clock/latency query. waitForMix() returns the mix count once the
 * mixer isn't mid-update; re-reading MixCount afterwards (seqlock-style)
 * guarantees the clock snapshot is consistent.
 */
ClockLatency BackendBase::getClockLatency()
{
    ClockLatency ret;

    uint refcount;
    do {
        refcount = mDevice->waitForMix();
        ret.ClockTime = GetDeviceClockTime(mDevice);
        std::atomic_thread_fence(std::memory_order_acquire);
    } while(refcount != ReadRef(mDevice->MixCount));

    /* NOTE: The device will generally have about all but one periods filled at
     * any given time during playback. Without a more accurate measurement from
     * the output, this is an okay approximation.
     */
    /* NOTE(review): if BufferSize could ever be less than UpdateSize the
     * subtraction would wrap before the zero-clamp below — presumably
     * BufferSize >= UpdateSize is a device invariant; confirm.
     */
    ret.Latency = std::max(std::chrono::seconds{mDevice->BufferSize-mDevice->UpdateSize},
        std::chrono::seconds::zero());
    ret.Latency /= mDevice->Frequency;

    return ret;
}
void BackendBase::setDefaultWFXChannelOrder()
{
mDevice->RealOut.ChannelIndex.fill(INVALID_CHANNEL_INDEX);
switch(mDevice->FmtChans)
{
case DevFmtMono:
mDevice->RealOut.ChannelIndex[FrontCenter] = 0;
break;
case DevFmtStereo:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
break;
case DevFmtQuad:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[BackLeft] = 2;
mDevice->RealOut.ChannelIndex[BackRight] = 3;
break;
case DevFmtX51:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[FrontCenter] = 2;
mDevice->RealOut.ChannelIndex[LFE] = 3;
mDevice->RealOut.ChannelIndex[SideLeft] = 4;
mDevice->RealOut.ChannelIndex[SideRight] = 5;
break;
case DevFmtX51Rear:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[FrontCenter] = 2;
mDevice->RealOut.ChannelIndex[LFE] = 3;
mDevice->RealOut.ChannelIndex[BackLeft] = 4;
mDevice->RealOut.ChannelIndex[BackRight] = 5;
break;
case DevFmtX61:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[FrontCenter] = 2;
mDevice->RealOut.ChannelIndex[LFE] = 3;
mDevice->RealOut.ChannelIndex[BackCenter] = 4;
mDevice->RealOut.ChannelIndex[SideLeft] = 5;
mDevice->RealOut.ChannelIndex[SideRight] = 6;
break;
case DevFmtX71:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[FrontCenter] = 2;
mDevice->RealOut.ChannelIndex[LFE] = 3;
mDevice->RealOut.ChannelIndex[BackLeft] = 4;
mDevice->RealOut.ChannelIndex[BackRight] = 5;
mDevice->RealOut.ChannelIndex[SideLeft] = 6;
mDevice->RealOut.ChannelIndex[SideRight] = 7;
break;
case DevFmtAmbi3D:
break;
}
}
void BackendBase::setDefaultChannelOrder()
{
mDevice->RealOut.ChannelIndex.fill(INVALID_CHANNEL_INDEX);
switch(mDevice->FmtChans)
{
case DevFmtX51Rear:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[BackLeft] = 2;
mDevice->RealOut.ChannelIndex[BackRight] = 3;
mDevice->RealOut.ChannelIndex[FrontCenter] = 4;
mDevice->RealOut.ChannelIndex[LFE] = 5;
return;
case DevFmtX71:
mDevice->RealOut.ChannelIndex[FrontLeft] = 0;
mDevice->RealOut.ChannelIndex[FrontRight] = 1;
mDevice->RealOut.ChannelIndex[BackLeft] = 2;
mDevice->RealOut.ChannelIndex[BackRight] = 3;
mDevice->RealOut.ChannelIndex[FrontCenter] = 4;
mDevice->RealOut.ChannelIndex[LFE] = 5;
mDevice->RealOut.ChannelIndex[SideLeft] = 6;
mDevice->RealOut.ChannelIndex[SideRight] = 7;
return;
/* Same as WFX order */
case DevFmtMono:
case DevFmtStereo:
case DevFmtQuad:
case DevFmtX51:
case DevFmtX61:
case DevFmtAmbi3D:
setDefaultWFXChannelOrder();
break;
}
}
#ifdef _WIN32
/* Sets the real-output channel map from a WAVEFORMATEXTENSIBLE channel mask,
 * assigning output slots in mask bit order. Speaker bits OpenAL has no
 * channel for are warned about and skipped.
 */
void BackendBase::setChannelOrderFromWFXMask(uint chanmask)
{
    /* Maps one WFX speaker bit to its Channel label, or an empty optional
     * for speakers without an OpenAL equivalent (which then get warned on
     * below).
     */
    auto get_channel = [](const DWORD chanbit) noexcept -> al::optional<Channel>
    {
        switch(chanbit)
        {
        case SPEAKER_FRONT_LEFT: return al::make_optional(FrontLeft);
        case SPEAKER_FRONT_RIGHT: return al::make_optional(FrontRight);
        case SPEAKER_FRONT_CENTER: return al::make_optional(FrontCenter);
        case SPEAKER_LOW_FREQUENCY: return al::make_optional(LFE);
        case SPEAKER_BACK_LEFT: return al::make_optional(BackLeft);
        case SPEAKER_BACK_RIGHT: return al::make_optional(BackRight);
        case SPEAKER_FRONT_LEFT_OF_CENTER: break;
        case SPEAKER_FRONT_RIGHT_OF_CENTER: break;
        case SPEAKER_BACK_CENTER: return al::make_optional(BackCenter);
        case SPEAKER_SIDE_LEFT: return al::make_optional(SideLeft);
        case SPEAKER_SIDE_RIGHT: return al::make_optional(SideRight);
        case SPEAKER_TOP_CENTER: return al::make_optional(TopCenter);
        case SPEAKER_TOP_FRONT_LEFT: return al::make_optional(TopFrontLeft);
        case SPEAKER_TOP_FRONT_CENTER: return al::make_optional(TopFrontCenter);
        case SPEAKER_TOP_FRONT_RIGHT: return al::make_optional(TopFrontRight);
        case SPEAKER_TOP_BACK_LEFT: return al::make_optional(TopBackLeft);
        case SPEAKER_TOP_BACK_CENTER: return al::make_optional(TopBackCenter);
        case SPEAKER_TOP_BACK_RIGHT: return al::make_optional(TopBackRight);
        }
        WARN("Unhandled WFX channel bit 0x%lx\n", chanbit);
        return al::nullopt;
    };

    const uint numchans{mDevice->channelsFromFmt()};
    uint idx{0};
    while(chanmask)
    {
        /* Extract and clear the lowest set bit of the mask. */
        const int bit{al::countr_zero(chanmask)};
        const uint mask{1u << bit};
        chanmask &= ~mask;

        if(auto label = get_channel(mask))
        {
            mDevice->RealOut.ChannelIndex[*label] = idx;
            /* Stop once every device channel has been assigned a slot. */
            if(++idx == numchans) break;
        }
    }
}
#endif

View file

@ -1,168 +1,119 @@
#ifndef AL_BACKENDS_BASE_H
#define AL_BACKENDS_BASE_H
#ifndef ALC_BACKENDS_BASE_H
#define ALC_BACKENDS_BASE_H
#include "alMain.h"
#include "threads.h"
#include <chrono>
#include <memory>
#include <mutex>
#include <string>
#include "albyte.h"
#include "alcmain.h"
#include "core/except.h"
#ifdef __cplusplus
extern "C" {
using uint = unsigned int;
struct ClockLatency {
std::chrono::nanoseconds ClockTime;
std::chrono::nanoseconds Latency;
};
struct BackendBase {
virtual void open(const char *name) = 0;
virtual bool reset();
virtual void start() = 0;
virtual void stop() = 0;
virtual void captureSamples(al::byte *buffer, uint samples);
virtual uint availableSamples();
virtual ClockLatency getClockLatency();
ALCdevice *const mDevice;
BackendBase(ALCdevice *device) noexcept : mDevice{device} { }
virtual ~BackendBase() = default;
protected:
/** Sets the default channel order used by most non-WaveFormatEx-based APIs. */
void setDefaultChannelOrder();
/** Sets the default channel order used by WaveFormatEx. */
void setDefaultWFXChannelOrder();
#ifdef _WIN32
/** Sets the channel order given the WaveFormatEx mask. */
void setChannelOrderFromWFXMask(uint chanmask);
#endif
};
using BackendPtr = std::unique_ptr<BackendBase>;
enum class BackendType {
Playback,
Capture
};
typedef struct ClockLatency {
ALint64 ClockTime;
ALint64 Latency;
} ClockLatency;
/* Helper to get the current clock time from the device's ClockBase, and
* SamplesDone converted from the sample rate.
*/
inline ALuint64 GetDeviceClockTime(ALCdevice *device)
inline std::chrono::nanoseconds GetDeviceClockTime(ALCdevice *device)
{
return device->ClockBase + (device->SamplesDone * DEVICE_CLOCK_RES /
device->Frequency);
using std::chrono::seconds;
using std::chrono::nanoseconds;
auto ns = nanoseconds{seconds{device->SamplesDone}} / device->Frequency;
return device->ClockBase + ns;
}
/* Helper to get the device latency from the backend, including any fixed
* latency from post-processing.
*/
inline ClockLatency GetClockLatency(ALCdevice *device)
{
BackendBase *backend{device->Backend.get()};
ClockLatency ret{backend->getClockLatency()};
ret.Latency += device->FixedLatency;
return ret;
}
struct ALCbackendVtable;
struct BackendFactory {
virtual bool init() = 0;
typedef struct ALCbackend {
const struct ALCbackendVtable *vtbl;
virtual bool querySupport(BackendType type) = 0;
ALCdevice *mDevice;
virtual std::string probe(BackendType type) = 0;
almtx_t mMutex;
} ALCbackend;
virtual BackendPtr createBackend(ALCdevice *device, BackendType type) = 0;
void ALCbackend_Construct(ALCbackend *self, ALCdevice *device);
void ALCbackend_Destruct(ALCbackend *self);
ALCboolean ALCbackend_reset(ALCbackend *self);
ALCenum ALCbackend_captureSamples(ALCbackend *self, void *buffer, ALCuint samples);
ALCuint ALCbackend_availableSamples(ALCbackend *self);
ClockLatency ALCbackend_getClockLatency(ALCbackend *self);
void ALCbackend_lock(ALCbackend *self);
void ALCbackend_unlock(ALCbackend *self);
struct ALCbackendVtable {
void (*const Destruct)(ALCbackend*);
ALCenum (*const open)(ALCbackend*, const ALCchar*);
ALCboolean (*const reset)(ALCbackend*);
ALCboolean (*const start)(ALCbackend*);
void (*const stop)(ALCbackend*);
ALCenum (*const captureSamples)(ALCbackend*, void*, ALCuint);
ALCuint (*const availableSamples)(ALCbackend*);
ClockLatency (*const getClockLatency)(ALCbackend*);
void (*const lock)(ALCbackend*);
void (*const unlock)(ALCbackend*);
void (*const Delete)(void*);
protected:
virtual ~BackendFactory() = default;
};
#define DEFINE_ALCBACKEND_VTABLE(T) \
DECLARE_THUNK(T, ALCbackend, void, Destruct) \
DECLARE_THUNK1(T, ALCbackend, ALCenum, open, const ALCchar*) \
DECLARE_THUNK(T, ALCbackend, ALCboolean, reset) \
DECLARE_THUNK(T, ALCbackend, ALCboolean, start) \
DECLARE_THUNK(T, ALCbackend, void, stop) \
DECLARE_THUNK2(T, ALCbackend, ALCenum, captureSamples, void*, ALCuint) \
DECLARE_THUNK(T, ALCbackend, ALCuint, availableSamples) \
DECLARE_THUNK(T, ALCbackend, ClockLatency, getClockLatency) \
DECLARE_THUNK(T, ALCbackend, void, lock) \
DECLARE_THUNK(T, ALCbackend, void, unlock) \
static void T##_ALCbackend_Delete(void *ptr) \
{ T##_Delete(STATIC_UPCAST(T, ALCbackend, (ALCbackend*)ptr)); } \
\
static const struct ALCbackendVtable T##_ALCbackend_vtable = { \
T##_ALCbackend_Destruct, \
\
T##_ALCbackend_open, \
T##_ALCbackend_reset, \
T##_ALCbackend_start, \
T##_ALCbackend_stop, \
T##_ALCbackend_captureSamples, \
T##_ALCbackend_availableSamples, \
T##_ALCbackend_getClockLatency, \
T##_ALCbackend_lock, \
T##_ALCbackend_unlock, \
\
T##_ALCbackend_Delete, \
}
namespace al {
typedef enum ALCbackend_Type {
ALCbackend_Playback,
ALCbackend_Capture,
ALCbackend_Loopback
} ALCbackend_Type;
struct ALCbackendFactoryVtable;
typedef struct ALCbackendFactory {
const struct ALCbackendFactoryVtable *vtbl;
} ALCbackendFactory;
void ALCbackendFactory_deinit(ALCbackendFactory *self);
struct ALCbackendFactoryVtable {
ALCboolean (*const init)(ALCbackendFactory *self);
void (*const deinit)(ALCbackendFactory *self);
ALCboolean (*const querySupport)(ALCbackendFactory *self, ALCbackend_Type type);
void (*const probe)(ALCbackendFactory *self, enum DevProbe type);
ALCbackend* (*const createBackend)(ALCbackendFactory *self, ALCdevice *device, ALCbackend_Type type);
enum class backend_error {
NoDevice,
DeviceError,
OutOfMemory
};
#define DEFINE_ALCBACKENDFACTORY_VTABLE(T) \
DECLARE_THUNK(T, ALCbackendFactory, ALCboolean, init) \
DECLARE_THUNK(T, ALCbackendFactory, void, deinit) \
DECLARE_THUNK1(T, ALCbackendFactory, ALCboolean, querySupport, ALCbackend_Type) \
DECLARE_THUNK1(T, ALCbackendFactory, void, probe, enum DevProbe) \
DECLARE_THUNK2(T, ALCbackendFactory, ALCbackend*, createBackend, ALCdevice*, ALCbackend_Type) \
\
static const struct ALCbackendFactoryVtable T##_ALCbackendFactory_vtable = { \
T##_ALCbackendFactory_init, \
T##_ALCbackendFactory_deinit, \
T##_ALCbackendFactory_querySupport, \
T##_ALCbackendFactory_probe, \
T##_ALCbackendFactory_createBackend, \
}
class backend_exception final : public base_exception {
backend_error mErrorCode;
public:
[[gnu::format(printf, 3, 4)]]
backend_exception(backend_error code, const char *msg, ...) : mErrorCode{code}
{
std::va_list args;
va_start(args, msg);
setMessage(msg, args);
va_end(args);
}
backend_error errorCode() const noexcept { return mErrorCode; }
};
ALCbackendFactory *ALCpulseBackendFactory_getFactory(void);
ALCbackendFactory *ALCalsaBackendFactory_getFactory(void);
ALCbackendFactory *ALCcoreAudioBackendFactory_getFactory(void);
ALCbackendFactory *ALCossBackendFactory_getFactory(void);
ALCbackendFactory *ALCjackBackendFactory_getFactory(void);
ALCbackendFactory *ALCsolarisBackendFactory_getFactory(void);
ALCbackendFactory *ALCsndioBackendFactory_getFactory(void);
ALCbackendFactory *ALCqsaBackendFactory_getFactory(void);
ALCbackendFactory *ALCwasapiBackendFactory_getFactory(void);
ALCbackendFactory *ALCdsoundBackendFactory_getFactory(void);
ALCbackendFactory *ALCwinmmBackendFactory_getFactory(void);
ALCbackendFactory *ALCportBackendFactory_getFactory(void);
ALCbackendFactory *ALCopenslBackendFactory_getFactory(void);
ALCbackendFactory *ALCnullBackendFactory_getFactory(void);
ALCbackendFactory *ALCwaveBackendFactory_getFactory(void);
ALCbackendFactory *ALCsdl2BackendFactory_getFactory(void);
ALCbackendFactory *ALCloopbackFactory_getFactory(void);
} // namespace al
inline void ALCdevice_Lock(ALCdevice *device)
{ V0(device->Backend,lock)(); }
inline void ALCdevice_Unlock(ALCdevice *device)
{ V0(device->Backend,unlock)(); }
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* AL_BACKENDS_BASE_H */
#endif /* ALC_BACKENDS_BASE_H */

View file

@ -1,810 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "alMain.h"
#include "alu.h"
#include "ringbuffer.h"
#include <CoreServices/CoreServices.h>
#include <unistd.h>
#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/AudioToolbox.h>
#include "backends/base.h"
/* The only device name this backend exposes. */
static const ALCchar ca_device[] = "CoreAudio Default";
/* Playback backend state, built on the system's default output AudioUnit. */
typedef struct ALCcoreAudioPlayback {
    DERIVE_FROM_TYPE(ALCbackend);

    AudioUnit audioUnit;

    ALuint frameSize; /* Bytes per sample frame of the device format. */
    AudioStreamBasicDescription format; // This is the OpenAL format as a CoreAudio ASBD
} ALCcoreAudioPlayback;
/* Method declarations and vtable wiring for the playback backend. Methods
 * declared via DECLARE_FORWARD* delegate to the ALCbackend base versions.
 */
static void ALCcoreAudioPlayback_Construct(ALCcoreAudioPlayback *self, ALCdevice *device);
static void ALCcoreAudioPlayback_Destruct(ALCcoreAudioPlayback *self);
static ALCenum ALCcoreAudioPlayback_open(ALCcoreAudioPlayback *self, const ALCchar *name);
static ALCboolean ALCcoreAudioPlayback_reset(ALCcoreAudioPlayback *self);
static ALCboolean ALCcoreAudioPlayback_start(ALCcoreAudioPlayback *self);
static void ALCcoreAudioPlayback_stop(ALCcoreAudioPlayback *self);
static DECLARE_FORWARD2(ALCcoreAudioPlayback, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCcoreAudioPlayback, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCcoreAudioPlayback, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCcoreAudioPlayback, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCcoreAudioPlayback, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCcoreAudioPlayback)
DEFINE_ALCBACKEND_VTABLE(ALCcoreAudioPlayback);
/* Constructs the playback backend, zeroing all state so Destruct can clean
 * up safely even if open() is never called or fails.
 */
static void ALCcoreAudioPlayback_Construct(ALCcoreAudioPlayback *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCcoreAudioPlayback, ALCbackend, self);

    /* Fix: audioUnit was left uninitialized here, but Destruct disposes it.
     * Zero it like the capture backend's Construct does. */
    self->audioUnit = 0;
    self->frameSize = 0;
    memset(&self->format, 0, sizeof(self->format));
}
/* Destroys the playback backend, releasing the AudioUnit if one was created.
 * Fix: the unit was previously uninitialized/disposed unconditionally; guard
 * it like ALCcoreAudioCapture_Destruct does.
 */
static void ALCcoreAudioPlayback_Destruct(ALCcoreAudioPlayback *self)
{
    if(self->audioUnit)
    {
        AudioUnitUninitialize(self->audioUnit);
        AudioComponentInstanceDispose(self->audioUnit);
        self->audioUnit = 0;
    }
    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* Render callback registered on the output unit (see _reset): CoreAudio
 * invokes it to fill the output buffer, which we mix into directly. The
 * backend lock guards against concurrent device changes.
 */
static OSStatus ALCcoreAudioPlayback_MixerProc(void *inRefCon,
    AudioUnitRenderActionFlags* UNUSED(ioActionFlags), const AudioTimeStamp* UNUSED(inTimeStamp),
    UInt32 UNUSED(inBusNumber), UInt32 UNUSED(inNumberFrames), AudioBufferList *ioData)
{
    ALCcoreAudioPlayback *self = inRefCon;
    ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;

    ALCcoreAudioPlayback_lock(self);
    /* Frame count comes from the buffer size, not inNumberFrames. */
    aluMixData(device, ioData->mBuffers[0].mData,
               ioData->mBuffers[0].mDataByteSize / self->frameSize);
    ALCcoreAudioPlayback_unlock(self);

    return noErr;
}
/* Opens the playback device. Only the default output device is supported,
 * so any name other than "CoreAudio Default" is rejected.
 */
static ALCenum ALCcoreAudioPlayback_open(ALCcoreAudioPlayback *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
    AudioComponentDescription desc;
    AudioComponent comp;
    OSStatus err;

    if(!name)
        name = ca_device;
    else if(strcmp(name, ca_device) != 0)
        return ALC_INVALID_VALUE;

    /* open the default output unit */
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_DefaultOutput;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    comp = AudioComponentFindNext(NULL, &desc);
    if(comp == NULL)
    {
        ERR("AudioComponentFindNext failed\n");
        return ALC_INVALID_VALUE;
    }

    err = AudioComponentInstanceNew(comp, &self->audioUnit);
    if(err != noErr)
    {
        ERR("AudioComponentInstanceNew failed\n");
        return ALC_INVALID_VALUE;
    }

    /* init and start the default audio unit... */
    err = AudioUnitInitialize(self->audioUnit);
    if(err != noErr)
    {
        ERR("AudioUnitInitialize failed\n");
        /* NOTE(review): self->audioUnit is left pointing at the disposed
         * instance here — consider resetting it to 0 so the destructor
         * can't touch it again. */
        AudioComponentInstanceDispose(self->audioUnit);
        return ALC_INVALID_VALUE;
    }

    alstr_copy_cstr(&device->DeviceName, name);
    return ALC_NO_ERROR;
}
/* Reconfigures the output unit for the device's current format: adopts the
 * hardware's sample rate and channel count, maps the device's sample type
 * onto a matching linear-PCM ASBD, and installs the mixer render callback.
 * Returns ALC_TRUE on success.
 */
static ALCboolean ALCcoreAudioPlayback_reset(ALCcoreAudioPlayback *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
    AudioStreamBasicDescription streamFormat;
    AURenderCallbackStruct input;
    OSStatus err;
    UInt32 size;

    /* The unit must be uninitialized before its format can be changed. */
    err = AudioUnitUninitialize(self->audioUnit);
    if(err != noErr)
        ERR("-- AudioUnitUninitialize failed.\n");

    /* retrieve default output unit's properties (output side) */
    size = sizeof(AudioStreamBasicDescription);
    err = AudioUnitGetProperty(self->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &streamFormat, &size);
    if(err != noErr || size != sizeof(AudioStreamBasicDescription))
    {
        ERR("AudioUnitGetProperty failed\n");
        return ALC_FALSE;
    }

#if 0
    TRACE("Output streamFormat of default output unit -\n");
    TRACE("  streamFormat.mFramesPerPacket = %d\n", streamFormat.mFramesPerPacket);
    TRACE("  streamFormat.mChannelsPerFrame = %d\n", streamFormat.mChannelsPerFrame);
    TRACE("  streamFormat.mBitsPerChannel = %d\n", streamFormat.mBitsPerChannel);
    TRACE("  streamFormat.mBytesPerPacket = %d\n", streamFormat.mBytesPerPacket);
    TRACE("  streamFormat.mBytesPerFrame = %d\n", streamFormat.mBytesPerFrame);
    TRACE("  streamFormat.mSampleRate = %5.0f\n", streamFormat.mSampleRate);
#endif

    /* set default output unit's input side to match output side */
    err = AudioUnitSetProperty(self->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, size);
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        return ALC_FALSE;
    }

    /* Adopt the hardware rate, rescaling the update count to keep roughly
     * the same update duration. */
    if(device->Frequency != streamFormat.mSampleRate)
    {
        device->NumUpdates = (ALuint)((ALuint64)device->NumUpdates *
                                      streamFormat.mSampleRate /
                                      device->Frequency);
        device->Frequency = streamFormat.mSampleRate;
    }

    /* FIXME: How to tell what channels are what in the output device, and how
     * to specify what we're giving? eg, 6.0 vs 5.1 */
    switch(streamFormat.mChannelsPerFrame)
    {
        case 1:
            device->FmtChans = DevFmtMono;
            break;
        case 2:
            device->FmtChans = DevFmtStereo;
            break;
        case 4:
            device->FmtChans = DevFmtQuad;
            break;
        case 6:
            device->FmtChans = DevFmtX51;
            break;
        case 7:
            device->FmtChans = DevFmtX61;
            break;
        case 8:
            device->FmtChans = DevFmtX71;
            break;
        default:
            ERR("Unhandled channel count (%d), using Stereo\n", streamFormat.mChannelsPerFrame);
            device->FmtChans = DevFmtStereo;
            streamFormat.mChannelsPerFrame = 2;
            break;
    }
    SetDefaultWFXChannelOrder(device);

    /* use channel count and sample rate from the default output unit's current
     * parameters, but reset everything else */
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mFormatFlags = 0;

    /* Map the device's sample type to a CoreAudio PCM flag/size; unsigned
     * types are switched to their signed equivalents. */
    switch(device->FmtType)
    {
        case DevFmtUByte:
            device->FmtType = DevFmtByte;
            /* fall-through */
        case DevFmtByte:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
            streamFormat.mBitsPerChannel = 8;
            break;
        case DevFmtUShort:
            device->FmtType = DevFmtShort;
            /* fall-through */
        case DevFmtShort:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
            streamFormat.mBitsPerChannel = 16;
            break;
        case DevFmtUInt:
            device->FmtType = DevFmtInt;
            /* fall-through */
        case DevFmtInt:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
            streamFormat.mBitsPerChannel = 32;
            break;
        case DevFmtFloat:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat;
            streamFormat.mBitsPerChannel = 32;
            break;
    }
    streamFormat.mBytesPerFrame = streamFormat.mChannelsPerFrame *
                                  streamFormat.mBitsPerChannel / 8;
    streamFormat.mBytesPerPacket = streamFormat.mBytesPerFrame;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags |= kAudioFormatFlagsNativeEndian |
                                 kLinearPCMFormatFlagIsPacked;

    err = AudioUnitSetProperty(self->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, sizeof(AudioStreamBasicDescription));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        return ALC_FALSE;
    }

    /* setup callback */
    self->frameSize = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
    input.inputProc = ALCcoreAudioPlayback_MixerProc;
    input.inputProcRefCon = self;

    err = AudioUnitSetProperty(self->audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &input, sizeof(AURenderCallbackStruct));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        return ALC_FALSE;
    }

    /* init the default audio unit... */
    err = AudioUnitInitialize(self->audioUnit);
    if(err != noErr)
    {
        ERR("AudioUnitInitialize failed\n");
        return ALC_FALSE;
    }

    return ALC_TRUE;
}
/* Starts output: the unit begins invoking the mixer render callback.
 * Returns ALC_TRUE on success.
 */
static ALCboolean ALCcoreAudioPlayback_start(ALCcoreAudioPlayback *self)
{
    const OSStatus status = AudioOutputUnitStart(self->audioUnit);
    if(status == noErr)
        return ALC_TRUE;
    ERR("AudioOutputUnitStart failed\n");
    return ALC_FALSE;
}
/* Stops output; failure is only logged since there's nothing to report. */
static void ALCcoreAudioPlayback_stop(ALCcoreAudioPlayback *self)
{
    const OSStatus status = AudioOutputUnitStop(self->audioUnit);
    if(status != noErr)
        ERR("AudioOutputUnitStop failed\n");
}
/* Capture backend state: an input AudioUnit feeding a ring buffer, with an
 * optional AudioConverter for sample-rate conversion.
 */
typedef struct ALCcoreAudioCapture {
    DERIVE_FROM_TYPE(ALCbackend);

    AudioUnit audioUnit;

    ALuint frameSize; /* Bytes per sample frame of the device format. */
    ALdouble sampleRateRatio; // Ratio of hardware sample rate / requested sample rate
    AudioStreamBasicDescription format; // This is the OpenAL format as a CoreAudio ASBD

    AudioConverterRef audioConverter; // Sample rate converter if needed
    AudioBufferList *bufferList; // Buffer for data coming from the input device
    ALCvoid *resampleBuffer; // Buffer for returned RingBuffer data when resampling

    ll_ringbuffer_t *ring;
} ALCcoreAudioCapture;
/* Method declarations and vtable wiring for the capture backend. Methods
 * declared via DECLARE_FORWARD delegate to the ALCbackend base versions.
 */
static void ALCcoreAudioCapture_Construct(ALCcoreAudioCapture *self, ALCdevice *device);
static void ALCcoreAudioCapture_Destruct(ALCcoreAudioCapture *self);
static ALCenum ALCcoreAudioCapture_open(ALCcoreAudioCapture *self, const ALCchar *name);
static DECLARE_FORWARD(ALCcoreAudioCapture, ALCbackend, ALCboolean, reset)
static ALCboolean ALCcoreAudioCapture_start(ALCcoreAudioCapture *self);
static void ALCcoreAudioCapture_stop(ALCcoreAudioCapture *self);
static ALCenum ALCcoreAudioCapture_captureSamples(ALCcoreAudioCapture *self, ALCvoid *buffer, ALCuint samples);
static ALCuint ALCcoreAudioCapture_availableSamples(ALCcoreAudioCapture *self);
static DECLARE_FORWARD(ALCcoreAudioCapture, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCcoreAudioCapture, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCcoreAudioCapture, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCcoreAudioCapture)
DEFINE_ALCBACKEND_VTABLE(ALCcoreAudioCapture);
/* Allocates an AudioBufferList with a single buffer of byteSize bytes; the
 * sample storage lives in the same allocation, immediately after the list
 * header. Returns NULL on allocation failure.
 */
static AudioBufferList *allocate_buffer_list(UInt32 channelCount, UInt32 byteSize)
{
    AudioBufferList *list;

    list = calloc(1, FAM_SIZE(AudioBufferList, mBuffers, 1) + byteSize);
    if(list)
    {
        list->mNumberBuffers = 1;

        list->mBuffers[0].mNumberChannels = channelCount;
        list->mBuffers[0].mDataByteSize = byteSize;
        /* Point the buffer's data at the storage past the one mBuffers entry. */
        list->mBuffers[0].mData = &list->mBuffers[1];
    }
    return list;
}
/* Frees a list from allocate_buffer_list (its data is in the same block). */
static void destroy_buffer_list(AudioBufferList *list)
{
    free(list);
}
/* Constructs the capture backend, zeroing every resource handle so Destruct
 * can clean up safely regardless of how far open() got.
 */
static void ALCcoreAudioCapture_Construct(ALCcoreAudioCapture *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCcoreAudioCapture, ALCbackend, self);

    self->audioUnit = 0;
    self->audioConverter = NULL;
    self->bufferList = NULL;
    self->resampleBuffer = NULL;
    self->ring = NULL;
}
/* Destroys the capture backend, releasing each resource that was created
 * (handles are zeroed in Construct, so unacquired ones are skipped/no-ops).
 */
static void ALCcoreAudioCapture_Destruct(ALCcoreAudioCapture *self)
{
    ll_ringbuffer_free(self->ring);
    self->ring = NULL;

    free(self->resampleBuffer);
    self->resampleBuffer = NULL;

    destroy_buffer_list(self->bufferList);
    self->bufferList = NULL;

    if(self->audioConverter)
        AudioConverterDispose(self->audioConverter);
    self->audioConverter = NULL;

    if(self->audioUnit)
        AudioComponentInstanceDispose(self->audioUnit);
    self->audioUnit = 0;

    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* Input callback: renders the newly-captured frames from the device into
 * bufferList, then queues them on the ring buffer for later conversion/read.
 */
static OSStatus ALCcoreAudioCapture_RecordProc(void *inRefCon,
    AudioUnitRenderActionFlags* UNUSED(ioActionFlags),
    const AudioTimeStamp *inTimeStamp, UInt32 UNUSED(inBusNumber),
    UInt32 inNumberFrames, AudioBufferList* UNUSED(ioData))
{
    ALCcoreAudioCapture *self = inRefCon;
    AudioUnitRenderActionFlags flags = 0;
    OSStatus err;

    // fill the bufferList with data from the input device
    err = AudioUnitRender(self->audioUnit, &flags, inTimeStamp, 1, inNumberFrames, self->bufferList);
    if(err != noErr)
    {
        ERR("AudioUnitRender error: %d\n", err);
        return err;
    }

    /* NOTE(review): a full ring buffer silently drops frames here — the
     * write's return count isn't checked. Presumably acceptable for capture
     * overrun; confirm. */
    ll_ringbuffer_write(self->ring, self->bufferList->mBuffers[0].mData, inNumberFrames);
    return noErr;
}
/* AudioConverter input callback: supplies *ioNumberDataPackets frames of
 * source data to the converter by reading them from the ring buffer into
 * resampleBuffer and pointing ioData at that staging buffer. Called from
 * within AudioConverterFillComplexBuffer(). */
static OSStatus ALCcoreAudioCapture_ConvertCallback(AudioConverterRef UNUSED(inAudioConverter),
    UInt32 *ioNumberDataPackets, AudioBufferList *ioData,
    AudioStreamPacketDescription** UNUSED(outDataPacketDescription),
    void *inUserData)
{
    ALCcoreAudioCapture *self = inUserData;

    // Read from the ring buffer and store temporarily in a large buffer
    ll_ringbuffer_read(self->ring, self->resampleBuffer, *ioNumberDataPackets);

    // Set the input data
    ioData->mNumberBuffers = 1;
    ioData->mBuffers[0].mNumberChannels = self->format.mChannelsPerFrame;
    ioData->mBuffers[0].mData = self->resampleBuffer;
    ioData->mBuffers[0].mDataByteSize = (*ioNumberDataPackets) * self->format.mBytesPerFrame;

    return noErr;
}
/* Opens the default CoreAudio capture device (the only name accepted).
 * Sets up a HAL output AudioUnit in input-only mode, an AudioConverter
 * for sample-rate conversion from the hardware rate to the requested
 * device rate, and a ring buffer between the record callback and the
 * converter.
 *
 * Returns ALC_NO_ERROR on success, or ALC_INVALID_VALUE on any failure
 * (with all partially-acquired resources released via the error path). */
static ALCenum ALCcoreAudioCapture_open(ALCcoreAudioCapture *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
    AudioStreamBasicDescription requestedFormat;  // The application requested format
    AudioStreamBasicDescription hardwareFormat;   // The hardware format
    AudioStreamBasicDescription outputFormat;     // The AudioUnit output format
    AURenderCallbackStruct input;
    AudioComponentDescription desc;
    AudioDeviceID inputDevice;
    UInt32 outputFrameCount;
    UInt32 propertySize;
    AudioObjectPropertyAddress propertyAddress;
    UInt32 enableIO;
    AudioComponent comp;
    OSStatus err;

    if(!name)
        name = ca_device;
    else if(strcmp(name, ca_device) != 0)
        return ALC_INVALID_VALUE;

    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_HALOutput;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    // Search for component with given description
    comp = AudioComponentFindNext(NULL, &desc);
    if(comp == NULL)
    {
        ERR("AudioComponentFindNext failed\n");
        return ALC_INVALID_VALUE;
    }

    // Open the component
    err = AudioComponentInstanceNew(comp, &self->audioUnit);
    if(err != noErr)
    {
        ERR("AudioComponentInstanceNew failed\n");
        goto error;
    }

    // Turn off AudioUnit output
    enableIO = 0;
    err = AudioUnitSetProperty(self->audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &enableIO, sizeof(ALuint));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        goto error;
    }

    // Turn on AudioUnit input
    enableIO = 1;
    err = AudioUnitSetProperty(self->audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &enableIO, sizeof(ALuint));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        goto error;
    }

    // Get the default input device
    propertySize = sizeof(AudioDeviceID);
    propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice;
    propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
    propertyAddress.mElement = kAudioObjectPropertyElementMaster;

    err = AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &propertySize, &inputDevice);
    if(err != noErr)
    {
        ERR("AudioObjectGetPropertyData failed\n");
        goto error;
    }
    if(inputDevice == kAudioDeviceUnknown)
    {
        ERR("No input device found\n");
        goto error;
    }

    // Track the input device
    err = AudioUnitSetProperty(self->audioUnit, kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global, 0, &inputDevice, sizeof(AudioDeviceID));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        goto error;
    }

    // set capture callback
    input.inputProc = ALCcoreAudioCapture_RecordProc;
    input.inputProcRefCon = self;

    err = AudioUnitSetProperty(self->audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 0, &input, sizeof(AURenderCallbackStruct));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        goto error;
    }

    // Initialize the device
    err = AudioUnitInitialize(self->audioUnit);
    if(err != noErr)
    {
        ERR("AudioUnitInitialize failed\n");
        goto error;
    }

    // Get the hardware format
    propertySize = sizeof(AudioStreamBasicDescription);
    err = AudioUnitGetProperty(self->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 1, &hardwareFormat, &propertySize);
    if(err != noErr || propertySize != sizeof(AudioStreamBasicDescription))
    {
        ERR("AudioUnitGetProperty failed\n");
        goto error;
    }

    // Set up the requested format description
    switch(device->FmtType)
    {
        case DevFmtUByte:
            requestedFormat.mBitsPerChannel = 8;
            requestedFormat.mFormatFlags = kAudioFormatFlagIsPacked;
            break;
        case DevFmtShort:
            requestedFormat.mBitsPerChannel = 16;
            requestedFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
            break;
        case DevFmtInt:
            requestedFormat.mBitsPerChannel = 32;
            requestedFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
            break;
        case DevFmtFloat:
            requestedFormat.mBitsPerChannel = 32;
            /* Fix: float samples must carry kLinearPCMFormatFlagIsFloat.
             * Without it the ASBD describes (unsigned) integer data and the
             * AudioConverter would misinterpret the input. This matches the
             * DevFmtFloat handling in the C++ backend. */
            requestedFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
            break;
        case DevFmtByte:
        case DevFmtUShort:
        case DevFmtUInt:
            ERR("%s samples not supported\n", DevFmtTypeString(device->FmtType));
            goto error;
    }

    switch(device->FmtChans)
    {
        case DevFmtMono:
            requestedFormat.mChannelsPerFrame = 1;
            break;
        case DevFmtStereo:
            requestedFormat.mChannelsPerFrame = 2;
            break;

        case DevFmtQuad:
        case DevFmtX51:
        case DevFmtX51Rear:
        case DevFmtX61:
        case DevFmtX71:
        case DevFmtAmbi3D:
            ERR("%s not supported\n", DevFmtChannelsString(device->FmtChans));
            goto error;
    }

    requestedFormat.mBytesPerFrame = requestedFormat.mChannelsPerFrame * requestedFormat.mBitsPerChannel / 8;
    requestedFormat.mBytesPerPacket = requestedFormat.mBytesPerFrame;
    requestedFormat.mSampleRate = device->Frequency;
    requestedFormat.mFormatID = kAudioFormatLinearPCM;
    requestedFormat.mReserved = 0;
    requestedFormat.mFramesPerPacket = 1;

    // save requested format description for later use
    self->format = requestedFormat;
    self->frameSize = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);

    // Use intermediate format for sample rate conversion (outputFormat)
    // Set sample rate to the same as hardware for resampling later
    outputFormat = requestedFormat;
    outputFormat.mSampleRate = hardwareFormat.mSampleRate;

    // Determine sample rate ratio for resampling
    self->sampleRateRatio = outputFormat.mSampleRate / device->Frequency;

    // The output format should be the requested format, but using the hardware sample rate
    // This is because the AudioUnit will automatically scale other properties, except for sample rate
    err = AudioUnitSetProperty(self->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, (void *)&outputFormat, sizeof(outputFormat));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        goto error;
    }

    // Set the AudioUnit output format frame count
    outputFrameCount = device->UpdateSize * self->sampleRateRatio;
    err = AudioUnitSetProperty(self->audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Output, 0, &outputFrameCount, sizeof(outputFrameCount));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed: %d\n", err);
        goto error;
    }

    // Set up sample converter
    err = AudioConverterNew(&outputFormat, &requestedFormat, &self->audioConverter);
    if(err != noErr)
    {
        ERR("AudioConverterNew failed: %d\n", err);
        goto error;
    }

    // Create a buffer for use in the resample callback
    self->resampleBuffer = malloc(device->UpdateSize * self->frameSize * self->sampleRateRatio);
    /* Fix: check the allocation; the convert callback writes through this
     * pointer unconditionally. */
    if(self->resampleBuffer == NULL)
        goto error;

    // Allocate buffer for the AudioUnit output
    self->bufferList = allocate_buffer_list(outputFormat.mChannelsPerFrame, device->UpdateSize * self->frameSize * self->sampleRateRatio);
    if(self->bufferList == NULL)
        goto error;

    self->ring = ll_ringbuffer_create(
        (size_t)ceil(device->UpdateSize*self->sampleRateRatio*device->NumUpdates),
        self->frameSize, false
    );
    if(!self->ring) goto error;

    alstr_copy_cstr(&device->DeviceName, name);

    return ALC_NO_ERROR;

error:
    /* Unified cleanup: every member is NULL/0 until acquired, so each
     * release below is safe regardless of how far open() got. */
    ll_ringbuffer_free(self->ring);
    self->ring = NULL;
    free(self->resampleBuffer);
    self->resampleBuffer = NULL;
    destroy_buffer_list(self->bufferList);
    self->bufferList = NULL;

    if(self->audioConverter)
        AudioConverterDispose(self->audioConverter);
    self->audioConverter = NULL;
    if(self->audioUnit)
        AudioComponentInstanceDispose(self->audioUnit);
    self->audioUnit = 0;

    return ALC_INVALID_VALUE;
}
/* Starts the capture AudioUnit; returns ALC_TRUE on success, ALC_FALSE
 * (after logging) on failure. */
static ALCboolean ALCcoreAudioCapture_start(ALCcoreAudioCapture *self)
{
    const OSStatus status = AudioOutputUnitStart(self->audioUnit);
    if(status == noErr)
        return ALC_TRUE;

    ERR("AudioOutputUnitStart failed\n");
    return ALC_FALSE;
}
/* Stops the capture AudioUnit; failures are only logged. */
static void ALCcoreAudioCapture_stop(ALCcoreAudioCapture *self)
{
    const OSStatus status = AudioOutputUnitStop(self->audioUnit);
    if(status == noErr)
        return;
    ERR("AudioOutputUnitStop failed\n");
}
/* Reads 'samples' frames of captured audio into 'buffer', pulling data out
 * of the ring buffer through the AudioConverter (which performs the
 * hardware-rate to device-rate conversion via _ConvertCallback). Returns
 * ALC_NO_ERROR on success, ALC_INVALID_VALUE on converter failure. */
static ALCenum ALCcoreAudioCapture_captureSamples(ALCcoreAudioCapture *self, ALCvoid *buffer, ALCuint samples)
{
    /* Stack-allocated AudioBufferList with room for one AudioBuffer entry. */
    union {
        ALbyte _[sizeof(AudioBufferList) + sizeof(AudioBuffer)];
        AudioBufferList list;
    } audiobuf = { { 0 } };
    UInt32 frameCount;
    OSStatus err;

    // If no samples are requested, just return
    if(samples == 0) return ALC_NO_ERROR;

    // Point the resampling buffer to the capture buffer
    audiobuf.list.mNumberBuffers = 1;
    audiobuf.list.mBuffers[0].mNumberChannels = self->format.mChannelsPerFrame;
    audiobuf.list.mBuffers[0].mDataByteSize = samples * self->frameSize;
    audiobuf.list.mBuffers[0].mData = buffer;

    // Resample into another AudioBufferList
    frameCount = samples;
    err = AudioConverterFillComplexBuffer(self->audioConverter,
        ALCcoreAudioCapture_ConvertCallback, self, &frameCount, &audiobuf.list, NULL
    );
    if(err != noErr)
    {
        ERR("AudioConverterFillComplexBuffer error: %d\n", err);
        return ALC_INVALID_VALUE;
    }
    return ALC_NO_ERROR;
}
/* Returns the number of readable frames, scaled from the hardware rate
 * (at which the ring buffer fills) down to the requested device rate. */
static ALCuint ALCcoreAudioCapture_availableSamples(ALCcoreAudioCapture *self)
{
    return ll_ringbuffer_read_space(self->ring) / self->sampleRateRatio;
}
/* Factory type for the CoreAudio backend; derives from ALCbackendFactory. */
typedef struct ALCcoreAudioBackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCcoreAudioBackendFactory;
#define ALCCOREAUDIOBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCcoreAudioBackendFactory, ALCbackendFactory) } }

ALCbackendFactory *ALCcoreAudioBackendFactory_getFactory(void);

/* Forward declarations of the factory's vtable methods. */
static ALCboolean ALCcoreAudioBackendFactory_init(ALCcoreAudioBackendFactory *self);
static DECLARE_FORWARD(ALCcoreAudioBackendFactory, ALCbackendFactory, void, deinit)
static ALCboolean ALCcoreAudioBackendFactory_querySupport(ALCcoreAudioBackendFactory *self, ALCbackend_Type type);
static void ALCcoreAudioBackendFactory_probe(ALCcoreAudioBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCcoreAudioBackendFactory_createBackend(ALCcoreAudioBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCcoreAudioBackendFactory);
/* Returns the process-wide CoreAudio backend factory singleton. */
ALCbackendFactory *ALCcoreAudioBackendFactory_getFactory(void)
{
    static ALCcoreAudioBackendFactory singleton = ALCCOREAUDIOBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &singleton);
}
/* One-time factory initialization; CoreAudio requires no global setup. */
static ALCboolean ALCcoreAudioBackendFactory_init(ALCcoreAudioBackendFactory* UNUSED(self))
{
    return ALC_TRUE;
}
/* Reports whether the given backend type is supported; CoreAudio provides
 * both playback and capture. */
static ALCboolean ALCcoreAudioBackendFactory_querySupport(ALCcoreAudioBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    /* Fix: the original condition was
     *   (type == ALCbackend_Playback || ALCbackend_Capture)
     * where the second operand is a non-zero enum constant, so the test was
     * always true and support was claimed for every backend type. */
    if(type == ALCbackend_Playback || type == ALCbackend_Capture)
        return ALC_TRUE;
    return ALC_FALSE;
}
/* Appends the single default CoreAudio device name to the requested
 * device-enumeration list. */
static void ALCcoreAudioBackendFactory_probe(ALCcoreAudioBackendFactory* UNUSED(self), enum DevProbe type)
{
    switch(type)
    {
        case ALL_DEVICE_PROBE:
            AppendAllDevicesList(ca_device);
            break;
        case CAPTURE_DEVICE_PROBE:
            AppendCaptureDeviceList(ca_device);
            break;
    }
}
/* Instantiates a playback or capture backend for the given device.
 * Returns NULL for unsupported types or on allocation failure. */
static ALCbackend* ALCcoreAudioBackendFactory_createBackend(ALCcoreAudioBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    switch(type)
    {
        case ALCbackend_Playback:
        {
            ALCcoreAudioPlayback *backend;
            NEW_OBJ(backend, ALCcoreAudioPlayback)(device);
            if(!backend) return NULL;
            return STATIC_CAST(ALCbackend, backend);
        }
        case ALCbackend_Capture:
        {
            ALCcoreAudioCapture *backend;
            NEW_OBJ(backend, ALCcoreAudioCapture)(device);
            if(!backend) return NULL;
            return STATIC_CAST(ALCbackend, backend);
        }
        default:
            break;
    }
    return NULL;
}

View file

@ -0,0 +1,686 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/coreaudio.h"
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <cmath>
#include "alcmain.h"
#include "alu.h"
#include "ringbuffer.h"
#include "converter.h"
#include "core/logging.h"
#include "backends/base.h"
#include <unistd.h>
#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/AudioToolbox.h>
namespace {
static const char ca_device[] = "CoreAudio Default";
/* Playback backend that renders mixed output through a CoreAudio
 * output AudioUnit. */
struct CoreAudioPlayback final : public BackendBase {
    CoreAudioPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~CoreAudioPlayback() override;

    /* Render callback body: fills ioData with freshly mixed samples. */
    OSStatus MixerProc(AudioUnitRenderActionFlags *ioActionFlags,
        const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames,
        AudioBufferList *ioData) noexcept;
    /* C trampoline registered with CoreAudio; forwards to MixerProc on the
     * instance passed as inRefCon. */
    static OSStatus MixerProcC(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags,
        const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames,
        AudioBufferList *ioData) noexcept
    {
        return static_cast<CoreAudioPlayback*>(inRefCon)->MixerProc(ioActionFlags, inTimeStamp,
            inBusNumber, inNumberFrames, ioData);
    }

    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;

    AudioUnit mAudioUnit{};  // Output AudioUnit instance (created in open())

    uint mFrameSize{0u};  // Bytes per sample frame of the device format
    AudioStreamBasicDescription mFormat{}; // This is the OpenAL format as a CoreAudio ASBD

    DEF_NEWDEL(CoreAudioPlayback)
};
/* Tears down the AudioUnit: uninitialize first, then dispose the instance. */
CoreAudioPlayback::~CoreAudioPlayback()
{
    AudioUnitUninitialize(mAudioUnit);
    AudioComponentInstanceDispose(mAudioUnit);
}
/* CoreAudio render callback: has the device mix fresh samples directly into
 * each output buffer. Runs on a CoreAudio-owned thread. */
OSStatus CoreAudioPlayback::MixerProc(AudioUnitRenderActionFlags*, const AudioTimeStamp*, UInt32,
    UInt32, AudioBufferList *ioData) noexcept
{
    for(size_t i{0};i < ioData->mNumberBuffers;++i)
    {
        auto &buffer = ioData->mBuffers[i];
        /* mDataByteSize/mFrameSize converts the buffer size to sample frames. */
        mDevice->renderSamples(buffer.mData, buffer.mDataByteSize/mFrameSize,
            buffer.mNumberChannels);
    }
    return noErr;
}
/* Opens the default CoreAudio output (the only device name accepted) by
 * locating the default-output AudioUnit component, instantiating it, and
 * initializing it. Throws al::backend_exception on any failure. */
void CoreAudioPlayback::open(const char *name)
{
    if(!name)
        name = ca_device;
    else if(strcmp(name, ca_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};

    /* open the default output unit */
    AudioComponentDescription desc{};
    desc.componentType = kAudioUnitType_Output;
#if TARGET_OS_IOS
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
#else
    desc.componentSubType = kAudioUnitSubType_DefaultOutput;
#endif
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp{AudioComponentFindNext(NULL, &desc)};
    if(comp == nullptr)
        throw al::backend_exception{al::backend_error::NoDevice, "Could not find audio component"};

    OSStatus err{AudioComponentInstanceNew(comp, &mAudioUnit)};
    if(err != noErr)
        throw al::backend_exception{al::backend_error::NoDevice,
            "Could not create component instance: %u", err};

    /* init and start the default audio unit... */
    err = AudioUnitInitialize(mAudioUnit);
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not initialize audio unit: %u", err};

    mDevice->DeviceName = name;
}
/* Reconfigures the AudioUnit for the device's current format request.
 * Queries the unit's output-side stream format, adapts the ALC device
 * (channel count, sample rate, sample type) to what the hardware provides,
 * pushes the resulting format onto the unit's input side, installs the
 * render callback, and re-initializes the unit. Returns false on any
 * CoreAudio error. */
bool CoreAudioPlayback::reset()
{
    /* The unit must be uninitialized before its stream format can change. */
    OSStatus err{AudioUnitUninitialize(mAudioUnit)};
    if(err != noErr)
        ERR("-- AudioUnitUninitialize failed.\n");

    /* retrieve default output unit's properties (output side) */
    AudioStreamBasicDescription streamFormat{};
    auto size = static_cast<UInt32>(sizeof(AudioStreamBasicDescription));
    err = AudioUnitGetProperty(mAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output,
        0, &streamFormat, &size);
    if(err != noErr || size != sizeof(AudioStreamBasicDescription))
    {
        ERR("AudioUnitGetProperty failed\n");
        return false;
    }

#if 0
    TRACE("Output streamFormat of default output unit -\n");
    TRACE("  streamFormat.mFramesPerPacket = %d\n", streamFormat.mFramesPerPacket);
    TRACE("  streamFormat.mChannelsPerFrame = %d\n", streamFormat.mChannelsPerFrame);
    TRACE("  streamFormat.mBitsPerChannel = %d\n", streamFormat.mBitsPerChannel);
    TRACE("  streamFormat.mBytesPerPacket = %d\n", streamFormat.mBytesPerPacket);
    TRACE("  streamFormat.mBytesPerFrame = %d\n", streamFormat.mBytesPerFrame);
    TRACE("  streamFormat.mSampleRate = %5.0f\n", streamFormat.mSampleRate);
#endif

    /* set default output unit's input side to match output side */
    err = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,
        0, &streamFormat, size);
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        return false;
    }

    if(mDevice->Frequency != streamFormat.mSampleRate)
    {
        /* Keep the same buffer duration when adopting the hardware rate. */
        mDevice->BufferSize = static_cast<uint>(uint64_t{mDevice->BufferSize} *
            streamFormat.mSampleRate / mDevice->Frequency);
        mDevice->Frequency = static_cast<uint>(streamFormat.mSampleRate);
    }

    /* FIXME: How to tell what channels are what in the output device, and how
     * to specify what we're giving? eg, 6.0 vs 5.1 */
    switch(streamFormat.mChannelsPerFrame)
    {
        case 1:
            mDevice->FmtChans = DevFmtMono;
            break;
        case 2:
            mDevice->FmtChans = DevFmtStereo;
            break;
        case 4:
            mDevice->FmtChans = DevFmtQuad;
            break;
        case 6:
            mDevice->FmtChans = DevFmtX51;
            break;
        case 7:
            mDevice->FmtChans = DevFmtX61;
            break;
        case 8:
            mDevice->FmtChans = DevFmtX71;
            break;
        default:
            ERR("Unhandled channel count (%d), using Stereo\n", streamFormat.mChannelsPerFrame);
            mDevice->FmtChans = DevFmtStereo;
            streamFormat.mChannelsPerFrame = 2;
            break;
    }
    setDefaultWFXChannelOrder();

    /* use channel count and sample rate from the default output unit's current
     * parameters, but reset everything else */
    streamFormat.mFramesPerPacket = 1;
    streamFormat.mFormatFlags = 0;
    /* Unsigned integer types are not offered; fold them to signed. */
    switch(mDevice->FmtType)
    {
        case DevFmtUByte:
            mDevice->FmtType = DevFmtByte;
            /* fall-through */
        case DevFmtByte:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
            streamFormat.mBitsPerChannel = 8;
            break;
        case DevFmtUShort:
            mDevice->FmtType = DevFmtShort;
            /* fall-through */
        case DevFmtShort:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
            streamFormat.mBitsPerChannel = 16;
            break;
        case DevFmtUInt:
            mDevice->FmtType = DevFmtInt;
            /* fall-through */
        case DevFmtInt:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
            streamFormat.mBitsPerChannel = 32;
            break;
        case DevFmtFloat:
            streamFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat;
            streamFormat.mBitsPerChannel = 32;
            break;
    }
    streamFormat.mBytesPerFrame = streamFormat.mChannelsPerFrame *
        streamFormat.mBitsPerChannel / 8;
    streamFormat.mBytesPerPacket = streamFormat.mBytesPerFrame;
    streamFormat.mFormatID = kAudioFormatLinearPCM;
    streamFormat.mFormatFlags |= kAudioFormatFlagsNativeEndian |
        kLinearPCMFormatFlagIsPacked;

    err = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,
        0, &streamFormat, sizeof(AudioStreamBasicDescription));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        return false;
    }

    /* setup callback */
    mFrameSize = mDevice->frameSizeFromFmt();
    AURenderCallbackStruct input{};
    input.inputProc = CoreAudioPlayback::MixerProcC;
    input.inputProcRefCon = this;

    err = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_SetRenderCallback,
        kAudioUnitScope_Input, 0, &input, sizeof(AURenderCallbackStruct));
    if(err != noErr)
    {
        ERR("AudioUnitSetProperty failed\n");
        return false;
    }

    /* init the default audio unit... */
    err = AudioUnitInitialize(mAudioUnit);
    if(err != noErr)
    {
        ERR("AudioUnitInitialize failed\n");
        return false;
    }

    return true;
}
void CoreAudioPlayback::start()
{
const OSStatus err{AudioOutputUnitStart(mAudioUnit)};
if(err != noErr)
throw al::backend_exception{al::backend_error::DeviceError,
"AudioOutputUnitStart failed: %d", err};
}
void CoreAudioPlayback::stop()
{
OSStatus err{AudioOutputUnitStop(mAudioUnit)};
if(err != noErr)
ERR("AudioOutputUnitStop failed\n");
}
/* Capture backend reading from a CoreAudio input AudioUnit, buffered
 * through a ring buffer with optional sample-rate conversion. */
struct CoreAudioCapture final : public BackendBase {
    CoreAudioCapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~CoreAudioCapture() override;

    /* Input callback body: renders captured frames into the ring buffer. */
    OSStatus RecordProc(AudioUnitRenderActionFlags *ioActionFlags,
        const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber,
        UInt32 inNumberFrames, AudioBufferList *ioData) noexcept;
    /* C trampoline registered with CoreAudio; forwards to RecordProc on the
     * instance passed as inRefCon. */
    static OSStatus RecordProcC(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags,
        const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames,
        AudioBufferList *ioData) noexcept
    {
        return static_cast<CoreAudioCapture*>(inRefCon)->RecordProc(ioActionFlags, inTimeStamp,
            inBusNumber, inNumberFrames, ioData);
    }

    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;

    AudioUnit mAudioUnit{0};  // Input AudioUnit instance (created in open())

    uint mFrameSize{0u};  // Bytes per sample frame of the device format
    AudioStreamBasicDescription mFormat{}; // This is the OpenAL format as a CoreAudio ASBD

    SampleConverterPtr mConverter;  // Only set when hardware rate != device rate

    RingBufferPtr mRing{nullptr};  // Hand-off between RecordProc and captureSamples

    DEF_NEWDEL(CoreAudioCapture)
};
/* Disposes the AudioUnit if open() got far enough to create one. */
CoreAudioCapture::~CoreAudioCapture()
{
    if(mAudioUnit)
        AudioComponentInstanceDispose(mAudioUnit);
    mAudioUnit = 0;
}
/* AudioUnit input callback: renders up to inNumberFrames of captured audio
 * directly into the ring buffer's write area, using the buffer's one or two
 * writable segments as the render target to avoid an intermediate copy.
 * Runs on a CoreAudio-owned thread. */
OSStatus CoreAudioCapture::RecordProc(AudioUnitRenderActionFlags*,
    const AudioTimeStamp *inTimeStamp, UInt32, UInt32 inNumberFrames,
    AudioBufferList*) noexcept
{
    AudioUnitRenderActionFlags flags = 0;
    /* Stack-allocated AudioBufferList with room for two AudioBuffer entries
     * (one per ring-buffer segment). */
    union {
        al::byte _[sizeof(AudioBufferList) + sizeof(AudioBuffer)*2];
        AudioBufferList list;
    } audiobuf{};

    auto rec_vec = mRing->getWriteVector();
    /* Clamp to the writable space; excess input frames are dropped. */
    inNumberFrames = static_cast<UInt32>(minz(inNumberFrames,
        rec_vec.first.len+rec_vec.second.len));

    // Fill the ringbuffer's two segments with data from the input device
    if(rec_vec.first.len >= inNumberFrames)
    {
        /* Everything fits in the first segment. */
        audiobuf.list.mNumberBuffers = 1;
        audiobuf.list.mBuffers[0].mNumberChannels = mFormat.mChannelsPerFrame;
        audiobuf.list.mBuffers[0].mData = rec_vec.first.buf;
        audiobuf.list.mBuffers[0].mDataByteSize = inNumberFrames * mFormat.mBytesPerFrame;
    }
    else
    {
        /* Split the render across both segments. */
        const auto remaining = static_cast<uint>(inNumberFrames - rec_vec.first.len);
        audiobuf.list.mNumberBuffers = 2;
        audiobuf.list.mBuffers[0].mNumberChannels = mFormat.mChannelsPerFrame;
        audiobuf.list.mBuffers[0].mData = rec_vec.first.buf;
        audiobuf.list.mBuffers[0].mDataByteSize = static_cast<UInt32>(rec_vec.first.len) *
            mFormat.mBytesPerFrame;
        audiobuf.list.mBuffers[1].mNumberChannels = mFormat.mChannelsPerFrame;
        audiobuf.list.mBuffers[1].mData = rec_vec.second.buf;
        audiobuf.list.mBuffers[1].mDataByteSize = remaining * mFormat.mBytesPerFrame;
    }
    OSStatus err{AudioUnitRender(mAudioUnit, &flags, inTimeStamp, audiobuf.list.mNumberBuffers,
        inNumberFrames, &audiobuf.list)};
    if(err != noErr)
    {
        ERR("AudioUnitRender error: %d\n", err);
        return err;
    }

    mRing->writeAdvance(inNumberFrames);
    return noErr;
}
/* Opens the default CoreAudio capture device (the only name accepted).
 * Configures a HAL/RemoteIO AudioUnit for input-only operation, sizes a
 * ring buffer for the hardware-rate stream, and creates a sample-rate
 * converter when the hardware rate differs from the requested device rate.
 * Throws al::backend_exception on any failure. */
void CoreAudioCapture::open(const char *name)
{
    AudioStreamBasicDescription requestedFormat;  // The application requested format
    AudioStreamBasicDescription hardwareFormat;   // The hardware format
    AudioStreamBasicDescription outputFormat;     // The AudioUnit output format
    AURenderCallbackStruct input;
    AudioComponentDescription desc;
    UInt32 propertySize;
    UInt32 enableIO;
    AudioComponent comp;
    OSStatus err;

    if(!name)
        name = ca_device;
    else if(strcmp(name, ca_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};

    desc.componentType = kAudioUnitType_Output;
#if TARGET_OS_IOS
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
#else
    desc.componentSubType = kAudioUnitSubType_HALOutput;
#endif
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    // Search for component with given description
    comp = AudioComponentFindNext(NULL, &desc);
    if(comp == NULL)
        throw al::backend_exception{al::backend_error::NoDevice, "Could not find audio component"};

    // Open the component
    err = AudioComponentInstanceNew(comp, &mAudioUnit);
    if(err != noErr)
        throw al::backend_exception{al::backend_error::NoDevice,
            "Could not create component instance: %u", err};

    // Turn off AudioUnit output
    enableIO = 0;
    err = AudioUnitSetProperty(mAudioUnit, kAudioOutputUnitProperty_EnableIO,
        kAudioUnitScope_Output, 0, &enableIO, sizeof(enableIO));
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not disable audio unit output property: %u", err};

    // Turn on AudioUnit input
    enableIO = 1;
    err = AudioUnitSetProperty(mAudioUnit, kAudioOutputUnitProperty_EnableIO,
        kAudioUnitScope_Input, 1, &enableIO, sizeof(enableIO));
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not enable audio unit input property: %u", err};

#if !TARGET_OS_IOS
    {
        // Get the default input device
        AudioDeviceID inputDevice = kAudioDeviceUnknown;

        propertySize = sizeof(AudioDeviceID);
        AudioObjectPropertyAddress propertyAddress{};
        propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice;
        propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
        propertyAddress.mElement = kAudioObjectPropertyElementMaster;

        err = AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, nullptr,
            &propertySize, &inputDevice);
        if(err != noErr)
            throw al::backend_exception{al::backend_error::NoDevice,
                "Could not get input device: %u", err};
        if(inputDevice == kAudioDeviceUnknown)
            throw al::backend_exception{al::backend_error::NoDevice, "Unknown input device"};

        // Track the input device
        err = AudioUnitSetProperty(mAudioUnit, kAudioOutputUnitProperty_CurrentDevice,
            kAudioUnitScope_Global, 0, &inputDevice, sizeof(AudioDeviceID));
        if(err != noErr)
            throw al::backend_exception{al::backend_error::NoDevice,
                "Could not set input device: %u", err};
    }
#endif

    // set capture callback
    input.inputProc = CoreAudioCapture::RecordProcC;
    input.inputProcRefCon = this;
    err = AudioUnitSetProperty(mAudioUnit, kAudioOutputUnitProperty_SetInputCallback,
        kAudioUnitScope_Global, 0, &input, sizeof(AURenderCallbackStruct));
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not set capture callback: %u", err};

    // Disable buffer allocation for capture
    /* RecordProc renders straight into the ring buffer, so the unit must
     * not allocate its own output buffers. */
    UInt32 flag{0};
    err = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_ShouldAllocateBuffer,
        kAudioUnitScope_Output, 1, &flag, sizeof(flag));
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not disable buffer allocation property: %u", err};

    // Initialize the device
    err = AudioUnitInitialize(mAudioUnit);
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not initialize audio unit: %u", err};

    // Get the hardware format
    propertySize = sizeof(AudioStreamBasicDescription);
    err = AudioUnitGetProperty(mAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,
        1, &hardwareFormat, &propertySize);
    if(err != noErr || propertySize != sizeof(AudioStreamBasicDescription))
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not get input format: %u", err};

    // Set up the requested format description
    switch(mDevice->FmtType)
    {
    case DevFmtByte:
        requestedFormat.mBitsPerChannel = 8;
        requestedFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
        break;
    case DevFmtUByte:
        requestedFormat.mBitsPerChannel = 8;
        requestedFormat.mFormatFlags = kAudioFormatFlagIsPacked;
        break;
    case DevFmtShort:
        requestedFormat.mBitsPerChannel = 16;
        requestedFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
        break;
    case DevFmtUShort:
        requestedFormat.mBitsPerChannel = 16;
        requestedFormat.mFormatFlags = kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
        break;
    case DevFmtInt:
        requestedFormat.mBitsPerChannel = 32;
        requestedFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
        break;
    case DevFmtUInt:
        requestedFormat.mBitsPerChannel = 32;
        requestedFormat.mFormatFlags = kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
        break;
    case DevFmtFloat:
        requestedFormat.mBitsPerChannel = 32;
        requestedFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
        break;
    }

    switch(mDevice->FmtChans)
    {
    case DevFmtMono:
        requestedFormat.mChannelsPerFrame = 1;
        break;
    case DevFmtStereo:
        requestedFormat.mChannelsPerFrame = 2;
        break;

    case DevFmtQuad:
    case DevFmtX51:
    case DevFmtX51Rear:
    case DevFmtX61:
    case DevFmtX71:
    case DevFmtAmbi3D:
        throw al::backend_exception{al::backend_error::DeviceError, "%s not supported",
            DevFmtChannelsString(mDevice->FmtChans)};
    }

    requestedFormat.mBytesPerFrame = requestedFormat.mChannelsPerFrame * requestedFormat.mBitsPerChannel / 8;
    requestedFormat.mBytesPerPacket = requestedFormat.mBytesPerFrame;
    requestedFormat.mSampleRate = mDevice->Frequency;
    requestedFormat.mFormatID = kAudioFormatLinearPCM;
    requestedFormat.mReserved = 0;
    requestedFormat.mFramesPerPacket = 1;

    // save requested format description for later use
    mFormat = requestedFormat;
    mFrameSize = mDevice->frameSizeFromFmt();

    // Use intermediate format for sample rate conversion (outputFormat)
    // Set sample rate to the same as hardware for resampling later
    outputFormat = requestedFormat;
    outputFormat.mSampleRate = hardwareFormat.mSampleRate;

    // The output format should be the requested format, but using the hardware sample rate
    // This is because the AudioUnit will automatically scale other properties, except for sample rate
    err = AudioUnitSetProperty(mAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output,
        1, &outputFormat, sizeof(outputFormat));
    if(err != noErr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not set input format: %u", err};

    /* Calculate the minimum AudioUnit output format frame count for the pre-
     * conversion ring buffer. Ensure at least 100ms for the total buffer.
     */
    double srateScale{double{outputFormat.mSampleRate} / mDevice->Frequency};
    auto FrameCount64 = maxu64(static_cast<uint64_t>(std::ceil(mDevice->BufferSize*srateScale)),
        static_cast<UInt32>(outputFormat.mSampleRate)/10);
    FrameCount64 += MaxResamplerPadding;
    if(FrameCount64 > std::numeric_limits<int32_t>::max())
        throw al::backend_exception{al::backend_error::DeviceError,
            "Calculated frame count is too large: %" PRIu64, FrameCount64};

    UInt32 outputFrameCount{};
    propertySize = sizeof(outputFrameCount);
    err = AudioUnitGetProperty(mAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice,
        kAudioUnitScope_Global, 0, &outputFrameCount, &propertySize);
    if(err != noErr || propertySize != sizeof(outputFrameCount))
        throw al::backend_exception{al::backend_error::DeviceError,
            "Could not get input frame count: %u", err};

    /* Size the ring buffer by the larger of the unit's slice size and the
     * computed minimum. */
    outputFrameCount = static_cast<UInt32>(maxu64(outputFrameCount, FrameCount64));
    mRing = RingBuffer::Create(outputFrameCount, mFrameSize, false);

    /* Set up sample converter if needed */
    if(outputFormat.mSampleRate != mDevice->Frequency)
        mConverter = CreateSampleConverter(mDevice->FmtType, mDevice->FmtType,
            mFormat.mChannelsPerFrame, static_cast<uint>(hardwareFormat.mSampleRate),
            mDevice->Frequency, Resampler::FastBSinc24);

    mDevice->DeviceName = name;
}
void CoreAudioCapture::start()
{
OSStatus err{AudioOutputUnitStart(mAudioUnit)};
if(err != noErr)
throw al::backend_exception{al::backend_error::DeviceError,
"AudioOutputUnitStart failed: %d", err};
}
void CoreAudioCapture::stop()
{
OSStatus err{AudioOutputUnitStop(mAudioUnit)};
if(err != noErr)
ERR("AudioOutputUnitStop failed\n");
}
/* Copies 'samples' frames of captured audio into 'buffer'. Without a
 * converter this is a plain ring-buffer read; with one, the ring buffer's
 * two readable segments are fed through the sample-rate converter and the
 * consumed amount is advanced afterwards. */
void CoreAudioCapture::captureSamples(al::byte *buffer, uint samples)
{
    if(!mConverter)
    {
        mRing->read(buffer, samples);
        return;
    }

    auto rec_vec = mRing->getReadVector();
    const void *src0{rec_vec.first.buf};
    auto src0len = static_cast<uint>(rec_vec.first.len);
    /* convert() updates src0/src0len to reflect what it consumed. */
    uint got{mConverter->convert(&src0, &src0len, buffer, samples)};
    size_t total_read{rec_vec.first.len - src0len};
    /* Continue into the second segment only if the first was exhausted and
     * more output is still wanted. */
    if(got < samples && !src0len && rec_vec.second.len > 0)
    {
        const void *src1{rec_vec.second.buf};
        auto src1len = static_cast<uint>(rec_vec.second.len);
        got += mConverter->convert(&src1, &src1len, buffer + got*mFrameSize, samples-got);
        total_read += rec_vec.second.len - src1len;
    }

    mRing->readAdvance(total_read);
}
/* Number of sample frames currently ready for captureSamples(). */
uint CoreAudioCapture::availableSamples()
{
    const auto pending = static_cast<uint>(mRing->readSpace());
    if(mConverter)
        return mConverter->availableOut(pending);
    return pending;
}
} // namespace
/* Returns the process-wide singleton factory for the CoreAudio backend. */
BackendFactory &CoreAudioBackendFactory::getFactory()
{
    static CoreAudioBackendFactory factory{};
    return factory;
}
/* CoreAudio needs no global setup; initialization always succeeds. */
bool CoreAudioBackendFactory::init()
{
    return true;
}
/* Both playback and capture are implemented by this backend. */
bool CoreAudioBackendFactory::querySupport(BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
    case BackendType::Capture:
        return true;
    }
    return false;
}
/* Returns the null-separated device name list. CoreAudio exposes a single
 * fixed device name for both playback and capture.
 */
std::string CoreAudioBackendFactory::probe(BackendType type)
{
    std::string devnames;
    if(type == BackendType::Playback || type == BackendType::Capture)
    {
        /* sizeof(ca_device) includes the null char, keeping the result a
         * null-separated (and double-null terminated) list.
         */
        devnames.append(ca_device, sizeof(ca_device));
    }
    return devnames;
}
/* Instantiates a playback or capture backend for the given device, or
 * returns null for an unsupported type.
 */
BackendPtr CoreAudioBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
        return BackendPtr{new CoreAudioPlayback{device}};
    case BackendType::Capture:
        return BackendPtr{new CoreAudioCapture{device}};
    }
    return nullptr;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_COREAUDIO_H
#define BACKENDS_COREAUDIO_H
#include "backends/base.h"
/* Factory producing CoreAudio playback and capture backends. */
struct CoreAudioBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Singleton accessor. */
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_COREAUDIO_H */

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,868 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/dsound.h"
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <cguid.h>
#include <mmreg.h>
#ifndef _WAVEFORMATEXTENSIBLE_
#include <ks.h>
#include <ksmedia.h>
#endif
#include <atomic>
#include <cassert>
#include <thread>
#include <string>
#include <vector>
#include <algorithm>
#include <functional>
#include "alcmain.h"
#include "alu.h"
#include "compat.h"
#include "core/logging.h"
#include "dynload.h"
#include "ringbuffer.h"
#include "strutils.h"
#include "threads.h"
/* MinGW-w64 needs this for some unknown reason now. */
using LPCWAVEFORMATEX = const WAVEFORMATEX*;
#include <dsound.h>
#ifndef DSSPEAKER_5POINT1
# define DSSPEAKER_5POINT1 0x00000006
#endif
#ifndef DSSPEAKER_5POINT1_BACK
# define DSSPEAKER_5POINT1_BACK 0x00000006
#endif
#ifndef DSSPEAKER_7POINT1
# define DSSPEAKER_7POINT1 0x00000007
#endif
#ifndef DSSPEAKER_7POINT1_SURROUND
# define DSSPEAKER_7POINT1_SURROUND 0x00000008
#endif
#ifndef DSSPEAKER_5POINT1_SURROUND
# define DSSPEAKER_5POINT1_SURROUND 0x00000009
#endif
/* Some headers seem to define these as macros for __uuidof, which is annoying
* since some headers don't declare them at all. Hopefully the ifdef is enough
* to tell if they need to be declared.
*/
#ifndef KSDATAFORMAT_SUBTYPE_PCM
DEFINE_GUID(KSDATAFORMAT_SUBTYPE_PCM, 0x00000001, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
#endif
#ifndef KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
DEFINE_GUID(KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, 0x00000003, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
#endif
namespace {
#define DEVNAME_HEAD "OpenAL Soft on "
#ifdef HAVE_DYNLOAD
void *ds_handle;
HRESULT (WINAPI *pDirectSoundCreate)(const GUID *pcGuidDevice, IDirectSound **ppDS, IUnknown *pUnkOuter);
HRESULT (WINAPI *pDirectSoundEnumerateW)(LPDSENUMCALLBACKW pDSEnumCallback, void *pContext);
HRESULT (WINAPI *pDirectSoundCaptureCreate)(const GUID *pcGuidDevice, IDirectSoundCapture **ppDSC, IUnknown *pUnkOuter);
HRESULT (WINAPI *pDirectSoundCaptureEnumerateW)(LPDSENUMCALLBACKW pDSEnumCallback, void *pContext);
#ifndef IN_IDE_PARSER
#define DirectSoundCreate pDirectSoundCreate
#define DirectSoundEnumerateW pDirectSoundEnumerateW
#define DirectSoundCaptureCreate pDirectSoundCaptureCreate
#define DirectSoundCaptureEnumerateW pDirectSoundCaptureEnumerateW
#endif
#endif
#define MONO SPEAKER_FRONT_CENTER
#define STEREO (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT)
#define QUAD (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT)
#define X5DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
#define X5DOT1REAR (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT)
#define X6DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_CENTER|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
#define X7DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
#define MAX_UPDATES 128
/* Associates a human-readable DirectSound device name with its GUID. */
struct DevMap {
    std::string name;
    GUID guid;

    /* Perfect-forwarding constructor so entries can be emplaced from any
     * name/guid argument pair without extra copies.
     */
    template<typename T0, typename T1>
    DevMap(T0&& name_, T1&& guid_)
      : name{std::forward<T0>(name_)}, guid{std::forward<T1>(guid_)}
    { }
};
al::vector<DevMap> PlaybackDevices;
al::vector<DevMap> CaptureDevices;
/* Returns true if an entry with the given name already exists in the list. */
bool checkName(const al::vector<DevMap> &list, const std::string &name)
{
    return std::any_of(list.cbegin(), list.cend(),
        [&name](const DevMap &entry) -> bool { return entry.name == name; });
}
/* DirectSound device enumeration callback. Appends each real device (the
 * null-GUID "primary" entry is skipped) to the DevMap vector passed via
 * 'data', de-duplicating the display name with " #2", " #3", ... suffixes.
 * Always returns TRUE to continue enumeration.
 */
BOOL CALLBACK DSoundEnumDevices(GUID *guid, const WCHAR *desc, const WCHAR*, void *data) noexcept
{
    if(!guid)
        return TRUE;

    auto& devices = *static_cast<al::vector<DevMap>*>(data);
    const std::string basename{DEVNAME_HEAD + wstr_to_utf8(desc)};

    int count{1};
    std::string newname{basename};
    while(checkName(devices, newname))
    {
        newname = basename;
        newname += " #";
        newname += std::to_string(++count);
    }
    devices.emplace_back(std::move(newname), *guid);
    const DevMap &newentry = devices.back();

    /* Log the accepted device name with its GUID string (debug aid only). */
    OLECHAR *guidstr{nullptr};
    HRESULT hr{StringFromCLSID(*guid, &guidstr)};
    if(SUCCEEDED(hr))
    {
        TRACE("Got device \"%s\", GUID \"%ls\"\n", newentry.name.c_str(), guidstr);
        CoTaskMemFree(guidstr);
    }
    return TRUE;
}
/* DirectSound playback backend. A dedicated mixer thread renders into a
 * looping secondary buffer, woken by buffer position notifications.
 */
struct DSoundPlayback final : public BackendBase {
    DSoundPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~DSoundPlayback() override;

    /* Mixer thread entry point; fills the DirectSound buffer as it plays. */
    int mixerProc();

    void open(const ALCchar *name) override;
    bool reset() override;
    void start() override;
    void stop() override;

    IDirectSound *mDS{nullptr};
    IDirectSoundBuffer *mPrimaryBuffer{nullptr};
    IDirectSoundBuffer *mBuffer{nullptr};
    IDirectSoundNotify *mNotifies{nullptr};
    HANDLE mNotifyEvent{nullptr};  /* signaled at each notify position */

    std::atomic<bool> mKillNow{true};  /* tells the mixer thread to exit */
    std::thread mThread;

    DEF_NEWDEL(DSoundPlayback)
};
DSoundPlayback::~DSoundPlayback()
{
    /* Release COM interfaces in reverse order of acquisition, then close
     * the notify event handle.
     */
    auto release = [](auto *&iface)
    {
        if(iface)
            iface->Release();
        iface = nullptr;
    };
    release(mNotifies);
    release(mBuffer);
    release(mPrimaryBuffer);
    release(mDS);

    if(mNotifyEvent)
        CloseHandle(mNotifyEvent);
    mNotifyEvent = nullptr;
}
/* Mixer thread loop. Tracks the play cursor of the looping DirectSound
 * buffer, and each time at least one fragment of space has been played,
 * locks that region and renders fresh samples into it. Returns non-zero on
 * fatal error (after disconnecting the device).
 */
FORCE_ALIGN int DSoundPlayback::mixerProc()
{
    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);

    DSBCAPS DSBCaps{};
    DSBCaps.dwSize = sizeof(DSBCaps);
    HRESULT err{mBuffer->GetCaps(&DSBCaps)};
    if(FAILED(err))
    {
        ERR("Failed to get buffer caps: 0x%lx\n", err);
        mDevice->handleDisconnect("Failure retrieving playback buffer info: 0x%lx", err);
        return 1;
    }

    const size_t FrameStep{mDevice->channelsFromFmt()};
    uint FrameSize{mDevice->frameSizeFromFmt()};
    DWORD FragSize{mDevice->UpdateSize * FrameSize};

    bool Playing{false};
    DWORD LastCursor{0u};
    mBuffer->GetCurrentPosition(&LastCursor, nullptr);
    while(!mKillNow.load(std::memory_order_acquire) &&
        mDevice->Connected.load(std::memory_order_acquire))
    {
        // Get current play cursor
        DWORD PlayCursor;
        mBuffer->GetCurrentPosition(&PlayCursor, nullptr);
        /* Bytes played since the last write position, modulo buffer size. */
        DWORD avail = (PlayCursor-LastCursor+DSBCaps.dwBufferBytes) % DSBCaps.dwBufferBytes;

        if(avail < FragSize)
        {
            /* Not enough free space yet. Start playback on first pass, then
             * wait for a position notification (2s timeout).
             */
            if(!Playing)
            {
                err = mBuffer->Play(0, 0, DSBPLAY_LOOPING);
                if(FAILED(err))
                {
                    ERR("Failed to play buffer: 0x%lx\n", err);
                    mDevice->handleDisconnect("Failure starting playback: 0x%lx", err);
                    return 1;
                }
                Playing = true;
            }

            avail = WaitForSingleObjectEx(mNotifyEvent, 2000, FALSE);
            if(avail != WAIT_OBJECT_0)
                ERR("WaitForSingleObjectEx error: 0x%lx\n", avail);
            continue;
        }
        /* Only write whole fragments. */
        avail -= avail%FragSize;

        // Lock output buffer
        void *WritePtr1, *WritePtr2;
        DWORD WriteCnt1{0u}, WriteCnt2{0u};
        err = mBuffer->Lock(LastCursor, avail, &WritePtr1, &WriteCnt1, &WritePtr2, &WriteCnt2, 0);

        // If the buffer is lost, restore it and lock
        if(err == DSERR_BUFFERLOST)
        {
            WARN("Buffer lost, restoring...\n");
            err = mBuffer->Restore();
            if(SUCCEEDED(err))
            {
                Playing = false;
                LastCursor = 0;
                err = mBuffer->Lock(0, DSBCaps.dwBufferBytes, &WritePtr1, &WriteCnt1,
                    &WritePtr2, &WriteCnt2, 0);
            }
        }

        if(SUCCEEDED(err))
        {
            /* The locked region may wrap around, yielding two spans. */
            mDevice->renderSamples(WritePtr1, WriteCnt1/FrameSize, FrameStep);
            if(WriteCnt2 > 0)
                mDevice->renderSamples(WritePtr2, WriteCnt2/FrameSize, FrameStep);

            mBuffer->Unlock(WritePtr1, WriteCnt1, WritePtr2, WriteCnt2);
        }
        else
        {
            ERR("Buffer lock error: %#lx\n", err);
            mDevice->handleDisconnect("Failed to lock output buffer: 0x%lx", err);
            return 1;
        }

        // Update old write cursor location
        LastCursor += WriteCnt1+WriteCnt2;
        LastCursor %= DSBCaps.dwBufferBytes;
    }

    return 0;
}
/* Opens the playback device named 'name' (or the first enumerated device
 * when name is null), creating the notify event and IDirectSound interface.
 * Throws al::backend_exception on failure.
 */
void DSoundPlayback::open(const char *name)
{
    HRESULT hr;
    if(PlaybackDevices.empty())
    {
        /* Initialize COM to prevent name truncation */
        HRESULT hrcom{CoInitialize(nullptr)};
        hr = DirectSoundEnumerateW(DSoundEnumDevices, &PlaybackDevices);
        if(FAILED(hr))
            ERR("Error enumerating DirectSound devices (0x%lx)!\n", hr);
        if(SUCCEEDED(hrcom))
            CoUninitialize();
    }

    const GUID *guid{nullptr};
    if(!name && !PlaybackDevices.empty())
    {
        name = PlaybackDevices[0].name.c_str();
        guid = &PlaybackDevices[0].guid;
    }
    else
    {
        /* Guard: a null name with an empty device list would otherwise be
         * dereferenced by the name comparison and CLSID conversion below.
         */
        if(!name)
            throw al::backend_exception{al::backend_error::NoDevice,
                "No DirectSound playback devices found"};

        auto iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(),
            [name](const DevMap &entry) -> bool { return entry.name == name; });
        if(iter == PlaybackDevices.cend())
        {
            /* Not a known name; try interpreting it as a GUID string. */
            GUID id{};
            hr = CLSIDFromString(utf8_to_wstr(name).c_str(), &id);
            if(SUCCEEDED(hr))
                iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(),
                    [&id](const DevMap &entry) -> bool { return entry.guid == id; });
            if(iter == PlaybackDevices.cend())
                throw al::backend_exception{al::backend_error::NoDevice,
                    "Device name \"%s\" not found", name};
        }
        guid = &iter->guid;
    }

    hr = DS_OK;
    mNotifyEvent = CreateEventW(nullptr, FALSE, FALSE, nullptr);
    if(!mNotifyEvent) hr = E_FAIL;

    //DirectSound Init code
    if(SUCCEEDED(hr))
        hr = DirectSoundCreate(guid, &mDS, nullptr);
    if(SUCCEEDED(hr))
        hr = mDS->SetCooperativeLevel(GetForegroundWindow(), DSSCL_PRIORITY);
    if(FAILED(hr))
        throw al::backend_exception{al::backend_error::DeviceError, "Device init failed: 0x%08lx",
            hr};

    mDevice->DeviceName = name;
}
/* Reconfigures the output buffers for the device's current format settings.
 * Unsupported sample types are coerced to supported ones, the channel layout
 * is derived from the system speaker config when not explicitly requested,
 * and the secondary buffer plus position notifications are (re)created.
 * Returns false on failure.
 */
bool DSoundPlayback::reset()
{
    /* Drop any objects left from a previous configuration. */
    if(mNotifies)
        mNotifies->Release();
    mNotifies = nullptr;
    if(mBuffer)
        mBuffer->Release();
    mBuffer = nullptr;
    if(mPrimaryBuffer)
        mPrimaryBuffer->Release();
    mPrimaryBuffer = nullptr;

    /* Map sample types DirectSound can't handle onto supported ones. */
    switch(mDevice->FmtType)
    {
    case DevFmtByte:
        mDevice->FmtType = DevFmtUByte;
        break;
    case DevFmtFloat:
        if(mDevice->Flags.test(SampleTypeRequest))
            break;
        /* fall-through */
    case DevFmtUShort:
        mDevice->FmtType = DevFmtShort;
        break;
    case DevFmtUInt:
        mDevice->FmtType = DevFmtInt;
        break;
    case DevFmtUByte:
    case DevFmtShort:
    case DevFmtInt:
        break;
    }

    WAVEFORMATEXTENSIBLE OutputType{};
    DWORD speakers;
    HRESULT hr{mDS->GetSpeakerConfig(&speakers)};
    if(SUCCEEDED(hr))
    {
        speakers = DSSPEAKER_CONFIG(speakers);
        if(!mDevice->Flags.test(ChannelsRequest))
        {
            /* No explicit channel request; follow the system speaker setup. */
            if(speakers == DSSPEAKER_MONO)
                mDevice->FmtChans = DevFmtMono;
            else if(speakers == DSSPEAKER_STEREO || speakers == DSSPEAKER_HEADPHONE)
                mDevice->FmtChans = DevFmtStereo;
            else if(speakers == DSSPEAKER_QUAD)
                mDevice->FmtChans = DevFmtQuad;
            else if(speakers == DSSPEAKER_5POINT1_SURROUND)
                mDevice->FmtChans = DevFmtX51;
            else if(speakers == DSSPEAKER_5POINT1_BACK)
                mDevice->FmtChans = DevFmtX51Rear;
            else if(speakers == DSSPEAKER_7POINT1 || speakers == DSSPEAKER_7POINT1_SURROUND)
                mDevice->FmtChans = DevFmtX71;
            else
                ERR("Unknown system speaker config: 0x%lx\n", speakers);
        }
        mDevice->IsHeadphones = mDevice->FmtChans == DevFmtStereo
            && speakers == DSSPEAKER_HEADPHONE;

        switch(mDevice->FmtChans)
        {
        case DevFmtMono: OutputType.dwChannelMask = MONO; break;
        case DevFmtAmbi3D: mDevice->FmtChans = DevFmtStereo;
            /*fall-through*/
        case DevFmtStereo: OutputType.dwChannelMask = STEREO; break;
        case DevFmtQuad: OutputType.dwChannelMask = QUAD; break;
        case DevFmtX51: OutputType.dwChannelMask = X5DOT1; break;
        case DevFmtX51Rear: OutputType.dwChannelMask = X5DOT1REAR; break;
        case DevFmtX61: OutputType.dwChannelMask = X6DOT1; break;
        case DevFmtX71: OutputType.dwChannelMask = X7DOT1; break;
        }

/* NOTE: the retry goto below jumps back into this block to rebuild the wave
 * format after falling back from float to 16-bit samples.
 */
retry_open:
        hr = S_OK;
        OutputType.Format.wFormatTag = WAVE_FORMAT_PCM;
        OutputType.Format.nChannels = static_cast<WORD>(mDevice->channelsFromFmt());
        OutputType.Format.wBitsPerSample = static_cast<WORD>(mDevice->bytesFromFmt() * 8);
        OutputType.Format.nBlockAlign = static_cast<WORD>(OutputType.Format.nChannels *
            OutputType.Format.wBitsPerSample / 8);
        OutputType.Format.nSamplesPerSec = mDevice->Frequency;
        OutputType.Format.nAvgBytesPerSec = OutputType.Format.nSamplesPerSec *
            OutputType.Format.nBlockAlign;
        OutputType.Format.cbSize = 0;
    }

    if(OutputType.Format.nChannels > 2 || mDevice->FmtType == DevFmtFloat)
    {
        /* >2 channels or float samples need the EXTENSIBLE format; the
         * primary buffer isn't used in this mode.
         */
        OutputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
        OutputType.Samples.wValidBitsPerSample = OutputType.Format.wBitsPerSample;
        OutputType.Format.cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
        if(mDevice->FmtType == DevFmtFloat)
            OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
        else
            OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;

        if(mPrimaryBuffer)
            mPrimaryBuffer->Release();
        mPrimaryBuffer = nullptr;
    }
    else
    {
        if(SUCCEEDED(hr) && !mPrimaryBuffer)
        {
            DSBUFFERDESC DSBDescription{};
            DSBDescription.dwSize = sizeof(DSBDescription);
            DSBDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
            hr = mDS->CreateSoundBuffer(&DSBDescription, &mPrimaryBuffer, nullptr);
        }
        if(SUCCEEDED(hr))
            hr = mPrimaryBuffer->SetFormat(&OutputType.Format);
    }

    if(SUCCEEDED(hr))
    {
        /* Size the secondary (mix) buffer in whole update periods, capped at
         * the number of notification slots available.
         */
        uint num_updates{mDevice->BufferSize / mDevice->UpdateSize};
        if(num_updates > MAX_UPDATES)
            num_updates = MAX_UPDATES;
        mDevice->BufferSize = mDevice->UpdateSize * num_updates;

        DSBUFFERDESC DSBDescription{};
        DSBDescription.dwSize = sizeof(DSBDescription);
        DSBDescription.dwFlags = DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_GETCURRENTPOSITION2
            | DSBCAPS_GLOBALFOCUS;
        DSBDescription.dwBufferBytes = mDevice->BufferSize * OutputType.Format.nBlockAlign;
        DSBDescription.lpwfxFormat = &OutputType.Format;

        hr = mDS->CreateSoundBuffer(&DSBDescription, &mBuffer, nullptr);
        if(FAILED(hr) && mDevice->FmtType == DevFmtFloat)
        {
            /* Float output failed; retry with 16-bit samples. */
            mDevice->FmtType = DevFmtShort;
            goto retry_open;
        }
    }

    if(SUCCEEDED(hr))
    {
        /* Install one position notification per update period, all firing
         * the same event the mixer thread waits on.
         */
        void *ptr;
        hr = mBuffer->QueryInterface(IID_IDirectSoundNotify, &ptr);
        if(SUCCEEDED(hr))
        {
            mNotifies = static_cast<IDirectSoundNotify*>(ptr);

            uint num_updates{mDevice->BufferSize / mDevice->UpdateSize};
            assert(num_updates <= MAX_UPDATES);

            std::array<DSBPOSITIONNOTIFY,MAX_UPDATES> nots;
            for(uint i{0};i < num_updates;++i)
            {
                nots[i].dwOffset = i * mDevice->UpdateSize * OutputType.Format.nBlockAlign;
                nots[i].hEventNotify = mNotifyEvent;
            }
            if(mNotifies->SetNotificationPositions(num_updates, nots.data()) != DS_OK)
                hr = E_FAIL;
        }
    }

    if(FAILED(hr))
    {
        /* Clean up partially-created objects before reporting failure. */
        if(mNotifies)
            mNotifies->Release();
        mNotifies = nullptr;
        if(mBuffer)
            mBuffer->Release();
        mBuffer = nullptr;
        if(mPrimaryBuffer)
            mPrimaryBuffer->Release();
        mPrimaryBuffer = nullptr;
        return false;
    }

    ResetEvent(mNotifyEvent);
    setChannelOrderFromWFXMask(OutputType.dwChannelMask);
    return true;
}
void DSoundPlayback::start()
{
try {
mKillNow.store(false, std::memory_order_release);
mThread = std::thread{std::mem_fn(&DSoundPlayback::mixerProc), this};
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start mixing thread: %s", e.what()};
}
}
/* Signal the mixer thread to exit, join it, then halt buffer playback. */
void DSoundPlayback::stop()
{
    const bool alreadyStopped{mKillNow.exchange(true, std::memory_order_acq_rel)};
    if(alreadyStopped || !mThread.joinable())
        return;
    mThread.join();

    mBuffer->Stop();
}
/* DirectSound capture backend. Samples are drained from the capture buffer
 * into a ring buffer in availableSamples(), and read out in captureSamples().
 */
struct DSoundCapture final : public BackendBase {
    DSoundCapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~DSoundCapture() override;

    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;

    IDirectSoundCapture *mDSC{nullptr};
    IDirectSoundCaptureBuffer *mDSCbuffer{nullptr};
    DWORD mBufferBytes{0u};  /* capture buffer size, in bytes */
    DWORD mCursor{0u};       /* last read position in the capture buffer */

    RingBufferPtr mRing;

    DEF_NEWDEL(DSoundCapture)
};
DSoundCapture::~DSoundCapture()
{
    /* Halt and release the capture buffer before dropping the device. */
    if(mDSCbuffer != nullptr)
    {
        mDSCbuffer->Stop();
        mDSCbuffer->Release();
        mDSCbuffer = nullptr;
    }

    if(mDSC != nullptr)
        mDSC->Release();
    mDSC = nullptr;
}
/* Opens the capture device named 'name' (or the first enumerated device when
 * name is null), validates the requested format, and creates the capture
 * buffer and staging ring buffer. Throws al::backend_exception on failure.
 */
void DSoundCapture::open(const char *name)
{
    HRESULT hr;
    if(CaptureDevices.empty())
    {
        /* Initialize COM to prevent name truncation */
        HRESULT hrcom{CoInitialize(nullptr)};
        hr = DirectSoundCaptureEnumerateW(DSoundEnumDevices, &CaptureDevices);
        if(FAILED(hr))
            ERR("Error enumerating DirectSound devices (0x%lx)!\n", hr);
        if(SUCCEEDED(hrcom))
            CoUninitialize();
    }

    const GUID *guid{nullptr};
    if(!name && !CaptureDevices.empty())
    {
        name = CaptureDevices[0].name.c_str();
        guid = &CaptureDevices[0].guid;
    }
    else
    {
        /* Guard: a null name with an empty device list would otherwise be
         * dereferenced by the name comparison and CLSID conversion below.
         */
        if(!name)
            throw al::backend_exception{al::backend_error::NoDevice,
                "No DirectSound capture devices found"};

        auto iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
            [name](const DevMap &entry) -> bool { return entry.name == name; });
        if(iter == CaptureDevices.cend())
        {
            /* Not a known name; try interpreting it as a GUID string. */
            GUID id{};
            hr = CLSIDFromString(utf8_to_wstr(name).c_str(), &id);
            if(SUCCEEDED(hr))
                iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
                    [&id](const DevMap &entry) -> bool { return entry.guid == id; });
            if(iter == CaptureDevices.cend())
                throw al::backend_exception{al::backend_error::NoDevice,
                    "Device name \"%s\" not found", name};
        }
        guid = &iter->guid;
    }

    /* DirectSound capture only supports unsigned 8-bit and signed 16/32-bit
     * integer, and float, samples.
     */
    switch(mDevice->FmtType)
    {
    case DevFmtByte:
    case DevFmtUShort:
    case DevFmtUInt:
        WARN("%s capture samples not supported\n", DevFmtTypeString(mDevice->FmtType));
        throw al::backend_exception{al::backend_error::DeviceError,
            "%s capture samples not supported", DevFmtTypeString(mDevice->FmtType)};

    case DevFmtUByte:
    case DevFmtShort:
    case DevFmtInt:
    case DevFmtFloat:
        break;
    }

    WAVEFORMATEXTENSIBLE InputType{};
    switch(mDevice->FmtChans)
    {
    case DevFmtMono: InputType.dwChannelMask = MONO; break;
    case DevFmtStereo: InputType.dwChannelMask = STEREO; break;
    case DevFmtQuad: InputType.dwChannelMask = QUAD; break;
    case DevFmtX51: InputType.dwChannelMask = X5DOT1; break;
    case DevFmtX51Rear: InputType.dwChannelMask = X5DOT1REAR; break;
    case DevFmtX61: InputType.dwChannelMask = X6DOT1; break;
    case DevFmtX71: InputType.dwChannelMask = X7DOT1; break;
    case DevFmtAmbi3D:
        WARN("%s capture not supported\n", DevFmtChannelsString(mDevice->FmtChans));
        throw al::backend_exception{al::backend_error::DeviceError, "%s capture not supported",
            DevFmtChannelsString(mDevice->FmtChans)};
    }

    InputType.Format.wFormatTag = WAVE_FORMAT_PCM;
    InputType.Format.nChannels = static_cast<WORD>(mDevice->channelsFromFmt());
    InputType.Format.wBitsPerSample = static_cast<WORD>(mDevice->bytesFromFmt() * 8);
    InputType.Format.nBlockAlign = static_cast<WORD>(InputType.Format.nChannels *
        InputType.Format.wBitsPerSample / 8);
    InputType.Format.nSamplesPerSec = mDevice->Frequency;
    InputType.Format.nAvgBytesPerSec = InputType.Format.nSamplesPerSec *
        InputType.Format.nBlockAlign;
    InputType.Format.cbSize = 0;
    InputType.Samples.wValidBitsPerSample = InputType.Format.wBitsPerSample;
    if(mDevice->FmtType == DevFmtFloat)
        InputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
    else
        InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
    if(InputType.Format.nChannels > 2 || mDevice->FmtType == DevFmtFloat)
    {
        /* >2 channels or float samples need the EXTENSIBLE format. */
        InputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
        InputType.Format.cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
    }

    /* Ensure at least 100ms of capture buffering. */
    uint samples{mDevice->BufferSize};
    samples = maxu(samples, 100 * mDevice->Frequency / 1000);

    DSCBUFFERDESC DSCBDescription{};
    DSCBDescription.dwSize = sizeof(DSCBDescription);
    DSCBDescription.dwFlags = 0;
    DSCBDescription.dwBufferBytes = samples * InputType.Format.nBlockAlign;
    DSCBDescription.lpwfxFormat = &InputType.Format;

    //DirectSoundCapture Init code
    hr = DirectSoundCaptureCreate(guid, &mDSC, nullptr);
    if(SUCCEEDED(hr))
        /* Fix: the result was previously discarded, leaving hr successful
         * (and mDSCbuffer null) when buffer creation failed.
         */
        hr = mDSC->CreateCaptureBuffer(&DSCBDescription, &mDSCbuffer, nullptr);
    if(SUCCEEDED(hr))
        mRing = RingBuffer::Create(mDevice->BufferSize, InputType.Format.nBlockAlign, false);

    if(FAILED(hr))
    {
        mRing = nullptr;
        if(mDSCbuffer)
            mDSCbuffer->Release();
        mDSCbuffer = nullptr;
        if(mDSC)
            mDSC->Release();
        mDSC = nullptr;

        throw al::backend_exception{al::backend_error::DeviceError, "Device init failed: 0x%08lx",
            hr};
    }

    mBufferBytes = DSCBDescription.dwBufferBytes;
    setChannelOrderFromWFXMask(InputType.dwChannelMask);

    mDevice->DeviceName = name;
}
void DSoundCapture::start()
{
const HRESULT hr{mDSCbuffer->Start(DSCBSTART_LOOPING)};
if(FAILED(hr))
throw al::backend_exception{al::backend_error::DeviceError,
"Failure starting capture: 0x%lx", hr};
}
void DSoundCapture::stop()
{
HRESULT hr{mDSCbuffer->Stop()};
if(FAILED(hr))
{
ERR("stop failed: 0x%08lx\n", hr);
mDevice->handleDisconnect("Failure stopping capture: 0x%lx", hr);
}
}
/* Copy already-staged samples out of the ring buffer. */
void DSoundCapture::captureSamples(al::byte *buffer, uint samples)
{
    mRing->read(buffer, samples);
}
/* Drains newly-captured data from the DirectSound buffer into the ring
 * buffer, then returns how many sample frames are ready for captureSamples().
 */
uint DSoundCapture::availableSamples()
{
    if(!mDevice->Connected.load(std::memory_order_acquire))
        return static_cast<uint>(mRing->readSpace());

    const uint FrameSize{mDevice->frameSizeFromFmt()};
    const DWORD BufferBytes{mBufferBytes};
    const DWORD LastCursor{mCursor};

    DWORD ReadCursor{};
    void *ReadPtr1{}, *ReadPtr2{};
    DWORD ReadCnt1{}, ReadCnt2{};
    HRESULT hr{mDSCbuffer->GetCurrentPosition(nullptr, &ReadCursor)};
    if(SUCCEEDED(hr))
    {
        /* Bytes captured since the last drain, modulo the buffer size. */
        const DWORD NumBytes{(BufferBytes+ReadCursor-LastCursor) % BufferBytes};
        if(!NumBytes) return static_cast<uint>(mRing->readSpace());
        hr = mDSCbuffer->Lock(LastCursor, NumBytes, &ReadPtr1, &ReadCnt1, &ReadPtr2, &ReadCnt2, 0);
    }
    if(SUCCEEDED(hr))
    {
        /* The locked region may wrap around, yielding two spans. */
        mRing->write(ReadPtr1, ReadCnt1/FrameSize);
        if(ReadPtr2 != nullptr && ReadCnt2 > 0)
            mRing->write(ReadPtr2, ReadCnt2/FrameSize);
        hr = mDSCbuffer->Unlock(ReadPtr1, ReadCnt1, ReadPtr2, ReadCnt2);
        mCursor = ReadCursor;
    }

    if(FAILED(hr))
    {
        ERR("update failed: 0x%08lx\n", hr);
        mDevice->handleDisconnect("Failure retrieving capture data: 0x%lx", hr);
    }

    return static_cast<uint>(mRing->readSpace());
}
} // namespace
/* Returns the process-wide singleton factory for the DirectSound backend. */
BackendFactory &DSoundBackendFactory::getFactory()
{
    static DSoundBackendFactory factory{};
    return factory;
}
/* Loads dsound.dll and resolves the entry points used by this backend (when
 * built with dynamic loading). Returns false if the library or any required
 * symbol is missing.
 */
bool DSoundBackendFactory::init()
{
#ifdef HAVE_DYNLOAD
    if(!ds_handle)
    {
        ds_handle = LoadLib("dsound.dll");
        if(!ds_handle)
        {
            ERR("Failed to load dsound.dll\n");
            return false;
        }

/* Resolve a required symbol; unload and fail if it's absent. */
#define LOAD_FUNC(f) do {                                                     \
    p##f = reinterpret_cast<decltype(p##f)>(GetSymbol(ds_handle, #f));        \
    if(!p##f)                                                                 \
    {                                                                         \
        CloseLib(ds_handle);                                                  \
        ds_handle = nullptr;                                                  \
        return false;                                                         \
    }                                                                         \
} while(0)
        LOAD_FUNC(DirectSoundCreate);
        LOAD_FUNC(DirectSoundEnumerateW);
        LOAD_FUNC(DirectSoundCaptureCreate);
        LOAD_FUNC(DirectSoundCaptureEnumerateW);
#undef LOAD_FUNC
    }
#endif
    return true;
}
/* Both playback and capture are implemented by this backend. */
bool DSoundBackendFactory::querySupport(BackendType type)
{
    if(type == BackendType::Playback)
        return true;
    return type == BackendType::Capture;
}
/* Re-enumerates the playback or capture devices and returns their names as a
 * null-separated, double-null terminated list.
 */
std::string DSoundBackendFactory::probe(BackendType type)
{
    std::string outnames;
    auto add_device = [&outnames](const DevMap &entry) -> void
    {
        /* +1 to also append the null char (to ensure a null-separated list and
         * double-null terminated list).
         */
        outnames.append(entry.name.c_str(), entry.name.length()+1);
    };

    /* Initialize COM to prevent name truncation */
    HRESULT hr;
    HRESULT hrcom{CoInitialize(nullptr)};
    switch(type)
    {
    case BackendType::Playback:
        PlaybackDevices.clear();
        hr = DirectSoundEnumerateW(DSoundEnumDevices, &PlaybackDevices);
        if(FAILED(hr))
            ERR("Error enumerating DirectSound playback devices (0x%lx)!\n", hr);
        std::for_each(PlaybackDevices.cbegin(), PlaybackDevices.cend(), add_device);
        break;

    case BackendType::Capture:
        CaptureDevices.clear();
        hr = DirectSoundCaptureEnumerateW(DSoundEnumDevices, &CaptureDevices);
        if(FAILED(hr))
            ERR("Error enumerating DirectSound capture devices (0x%lx)!\n", hr);
        std::for_each(CaptureDevices.cbegin(), CaptureDevices.cend(), add_device);
        break;
    }
    /* Only uninitialize COM if our CoInitialize call succeeded. */
    if(SUCCEEDED(hrcom))
        CoUninitialize();

    return outnames;
}
/* Instantiates a playback or capture backend for the given device, or
 * returns null for an unsupported type.
 */
BackendPtr DSoundBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
        return BackendPtr{new DSoundPlayback{device}};
    case BackendType::Capture:
        return BackendPtr{new DSoundCapture{device}};
    }
    return nullptr;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_DSOUND_H
#define BACKENDS_DSOUND_H
#include "backends/base.h"
/* Factory producing DirectSound playback and capture backends. */
struct DSoundBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Singleton accessor. */
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_DSOUND_H */

View file

@ -1,607 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include "alMain.h"
#include "alu.h"
#include "alconfig.h"
#include "ringbuffer.h"
#include "threads.h"
#include "compat.h"
#include "backends/base.h"
#include <jack/jack.h>
#include <jack/ringbuffer.h>
static const ALCchar jackDevice[] = "JACK Default";
#ifdef HAVE_DYNLOAD
#define JACK_FUNCS(MAGIC) \
MAGIC(jack_client_open); \
MAGIC(jack_client_close); \
MAGIC(jack_client_name_size); \
MAGIC(jack_get_client_name); \
MAGIC(jack_connect); \
MAGIC(jack_activate); \
MAGIC(jack_deactivate); \
MAGIC(jack_port_register); \
MAGIC(jack_port_unregister); \
MAGIC(jack_port_get_buffer); \
MAGIC(jack_port_name); \
MAGIC(jack_get_ports); \
MAGIC(jack_free); \
MAGIC(jack_get_sample_rate); \
MAGIC(jack_set_error_function); \
MAGIC(jack_set_process_callback); \
MAGIC(jack_set_buffer_size_callback); \
MAGIC(jack_set_buffer_size); \
MAGIC(jack_get_buffer_size);
static void *jack_handle;
#define MAKE_FUNC(f) static __typeof(f) * p##f
JACK_FUNCS(MAKE_FUNC);
static __typeof(jack_error_callback) * pjack_error_callback;
#undef MAKE_FUNC
#define jack_client_open pjack_client_open
#define jack_client_close pjack_client_close
#define jack_client_name_size pjack_client_name_size
#define jack_get_client_name pjack_get_client_name
#define jack_connect pjack_connect
#define jack_activate pjack_activate
#define jack_deactivate pjack_deactivate
#define jack_port_register pjack_port_register
#define jack_port_unregister pjack_port_unregister
#define jack_port_get_buffer pjack_port_get_buffer
#define jack_port_name pjack_port_name
#define jack_get_ports pjack_get_ports
#define jack_free pjack_free
#define jack_get_sample_rate pjack_get_sample_rate
#define jack_set_error_function pjack_set_error_function
#define jack_set_process_callback pjack_set_process_callback
#define jack_set_buffer_size_callback pjack_set_buffer_size_callback
#define jack_set_buffer_size pjack_set_buffer_size
#define jack_get_buffer_size pjack_get_buffer_size
#define jack_error_callback (*pjack_error_callback)
#endif
static jack_options_t ClientOptions = JackNullOption;
/* Loads the JACK client library and resolves the symbols declared in
 * JACK_FUNCS (when built with dynamic loading). Returns ALC_TRUE on success.
 */
static ALCboolean jack_load(void)
{
    ALCboolean error = ALC_FALSE;

#ifdef HAVE_DYNLOAD
    if(!jack_handle)
    {
        al_string missing_funcs = AL_STRING_INIT_STATIC();

#ifdef _WIN32
#define JACKLIB "libjack.dll"
#else
#define JACKLIB "libjack.so.0"
#endif
        jack_handle = LoadLib(JACKLIB);
        if(!jack_handle)
        {
            WARN("Failed to load %s\n", JACKLIB);
            return ALC_FALSE;
        }

        error = ALC_FALSE;
/* Resolve a required symbol; record its name if missing so all absent
 * symbols can be reported together.
 */
#define LOAD_FUNC(f) do { \
    p##f = GetSymbol(jack_handle, #f); \
    if(p##f == NULL) { \
        error = ALC_TRUE; \
        alstr_append_cstr(&missing_funcs, "\n" #f); \
    } \
} while(0)
        JACK_FUNCS(LOAD_FUNC);
#undef LOAD_FUNC
        /* Optional symbols. These don't exist in all versions of JACK. */
#define LOAD_SYM(f) p##f = GetSymbol(jack_handle, #f)
        LOAD_SYM(jack_error_callback);
#undef LOAD_SYM

        if(error)
        {
            WARN("Missing expected functions:%s\n", alstr_get_cstr(missing_funcs));
            CloseLib(jack_handle);
            jack_handle = NULL;
        }
        alstr_reset(&missing_funcs);
    }
#endif

    return !error;
}
/* JACK playback backend state, deriving from the generic ALCbackend. */
typedef struct ALCjackPlayback {
    DERIVE_FROM_TYPE(ALCbackend);

    jack_client_t *Client;                    /* connection to the JACK server */
    jack_port_t *Port[MAX_OUTPUT_CHANNELS];   /* one output port per channel */
    ll_ringbuffer_t *Ring;                    /* mixed samples awaiting the process callback */
    alsem_t Sem;                              /* posted by the process callback to wake the mixer */

    ATOMIC(ALenum) killNow;                   /* tells the mixer thread to exit */
    althrd_t thread;
} ALCjackPlayback;
static int ALCjackPlayback_bufferSizeNotify(jack_nframes_t numframes, void *arg);
static int ALCjackPlayback_process(jack_nframes_t numframes, void *arg);
static int ALCjackPlayback_mixerProc(void *arg);
static void ALCjackPlayback_Construct(ALCjackPlayback *self, ALCdevice *device);
static void ALCjackPlayback_Destruct(ALCjackPlayback *self);
static ALCenum ALCjackPlayback_open(ALCjackPlayback *self, const ALCchar *name);
static ALCboolean ALCjackPlayback_reset(ALCjackPlayback *self);
static ALCboolean ALCjackPlayback_start(ALCjackPlayback *self);
static void ALCjackPlayback_stop(ALCjackPlayback *self);
static DECLARE_FORWARD2(ALCjackPlayback, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCjackPlayback, ALCbackend, ALCuint, availableSamples)
static ClockLatency ALCjackPlayback_getClockLatency(ALCjackPlayback *self);
static DECLARE_FORWARD(ALCjackPlayback, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCjackPlayback, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCjackPlayback)
DEFINE_ALCBACKEND_VTABLE(ALCjackPlayback);
/* Initializes a new ALCjackPlayback instance: sets up the base backend and
 * vtable, and puts all JACK handles in a cleared/inactive state.
 */
static void ALCjackPlayback_Construct(ALCjackPlayback *self, ALCdevice *device)
{
    ALuint i;

    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCjackPlayback, ALCbackend, self);

    alsem_init(&self->Sem, 0);

    self->Client = NULL;
    for(i = 0;i < MAX_OUTPUT_CHANNELS;i++)
        self->Port[i] = NULL;
    self->Ring = NULL;

    /* Mixer starts in the "killed" state until start() clears it. */
    ATOMIC_INIT(&self->killNow, AL_TRUE);
}
/* Tears down an ALCjackPlayback instance: unregisters any ports, closes the
 * JACK client, and destroys the semaphore before destructing the base.
 */
static void ALCjackPlayback_Destruct(ALCjackPlayback *self)
{
    ALuint i;

    if(self->Client)
    {
        for(i = 0;i < MAX_OUTPUT_CHANNELS;i++)
        {
            if(self->Port[i])
                jack_port_unregister(self->Client, self->Port[i]);
            self->Port[i] = NULL;
        }
        jack_client_close(self->Client);
        self->Client = NULL;
    }

    alsem_destroy(&self->Sem);

    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* JACK callback invoked when the server changes its buffer size. Updates the
 * device's update metrics to match and reallocates the ring buffer to suit.
 * Runs outside the mixer thread, hence the explicit backend lock.
 */
static int ALCjackPlayback_bufferSizeNotify(jack_nframes_t numframes, void *arg)
{
    ALCjackPlayback *self = arg;
    ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
    ALuint bufsize;

    ALCjackPlayback_lock(self);
    device->UpdateSize = numframes;
    device->NumUpdates = 2;

    /* Allow a config override of the buffering amount, rounded up to a
     * power of two and never smaller than one JACK period.
     */
    bufsize = device->UpdateSize;
    if(ConfigValueUInt(alstr_get_cstr(device->DeviceName), "jack", "buffer-size", &bufsize))
        bufsize = maxu(NextPowerOf2(bufsize), device->UpdateSize);
    device->NumUpdates = (bufsize+device->UpdateSize) / device->UpdateSize;

    TRACE("%u update size x%u\n", device->UpdateSize, device->NumUpdates);

    /* Replace the ring buffer with one sized for the new period. */
    ll_ringbuffer_free(self->Ring);
    self->Ring = ll_ringbuffer_create(bufsize,
        FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder),
        true
    );
    if(!self->Ring)
    {
        ERR("Failed to reallocate ringbuffer\n");
        aluHandleDisconnect(device, "Failed to reallocate %u-sample buffer", bufsize);
    }
    ALCjackPlayback_unlock(self);
    return 0;
}
/* JACK realtime process callback. Deinterleaves up to numframes sample
 * frames from the ring buffer into the per-channel port buffers, then wakes
 * the mixer thread. Must not block or allocate.
 */
static int ALCjackPlayback_process(jack_nframes_t numframes, void *arg)
{
    ALCjackPlayback *self = arg;
    jack_default_audio_sample_t *out[MAX_OUTPUT_CHANNELS];
    ll_ringbuffer_data_t data[2];
    jack_nframes_t total = 0;
    jack_nframes_t todo;
    ALsizei i, c, numchans;

    /* The readable region may wrap, giving up to two segments. */
    ll_ringbuffer_get_read_vector(self->Ring, data);

    /* Collect the output buffer for each registered port. */
    for(c = 0;c < MAX_OUTPUT_CHANNELS && self->Port[c];c++)
        out[c] = jack_port_get_buffer(self->Port[c], numframes);
    numchans = c;

    /* First segment: copy channel c from the interleaved ring data. */
    todo = minu(numframes, data[0].len);
    for(c = 0;c < numchans;c++)
    {
        const ALfloat *restrict in = ((ALfloat*)data[0].buf) + c;
        for(i = 0;(jack_nframes_t)i < todo;i++)
            out[c][i] = in[i*numchans];
        out[c] += todo;
    }
    total += todo;

    /* Second (wrapped) segment, if more frames are still needed. */
    todo = minu(numframes-total, data[1].len);
    if(todo > 0)
    {
        for(c = 0;c < numchans;c++)
        {
            const ALfloat *restrict in = ((ALfloat*)data[1].buf) + c;
            for(i = 0;(jack_nframes_t)i < todo;i++)
                out[c][i] = in[i*numchans];
            out[c] += todo;
        }
        total += todo;
    }

    ll_ringbuffer_read_advance(self->Ring, total);
    /* Signal the mixer thread that write space is available. */
    alsem_post(&self->Sem);

    /* Underrun: fill whatever is left with silence. */
    if(numframes > total)
    {
        todo = numframes-total;
        for(c = 0;c < numchans;c++)
        {
            for(i = 0;(jack_nframes_t)i < todo;i++)
                out[c][i] = 0.0f;
        }
    }

    return 0;
}
/* Mixer thread: renders audio into the ring buffer in UpdateSize-multiples,
 * sleeping on the semaphore whenever the buffer is full. Holds the backend
 * lock while mixing; releases it around the semaphore wait.
 */
static int ALCjackPlayback_mixerProc(void *arg)
{
    ALCjackPlayback *self = arg;
    ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
    ll_ringbuffer_data_t data[2];

    SetRTPriority();
    althrd_setname(althrd_current(), MIXER_THREAD_NAME);

    ALCjackPlayback_lock(self);
    while(!ATOMIC_LOAD(&self->killNow, almemory_order_acquire) &&
          ATOMIC_LOAD(&device->Connected, almemory_order_acquire))
    {
        ALuint todo, len1, len2;

        /* Not enough room for a full update: wait for the process
         * callback to consume some data, then re-check.
         */
        if(ll_ringbuffer_write_space(self->Ring) < device->UpdateSize)
        {
            ALCjackPlayback_unlock(self);
            alsem_wait(&self->Sem);
            ALCjackPlayback_lock(self);
            continue;
        }

        /* Mix whole updates only, possibly split across the wrap point. */
        ll_ringbuffer_get_write_vector(self->Ring, data);
        todo = data[0].len + data[1].len;
        todo -= todo%device->UpdateSize;

        len1 = minu(data[0].len, todo);
        len2 = minu(data[1].len, todo-len1);

        aluMixData(device, data[0].buf, len1);
        if(len2 > 0)
            aluMixData(device, data[1].buf, len2);
        ll_ringbuffer_write_advance(self->Ring, todo);
    }
    ALCjackPlayback_unlock(self);

    return 0;
}
/* Opens a connection to the JACK server for the (single) exposed device
 * name and installs the process/buffer-size callbacks. Returns
 * ALC_INVALID_VALUE for unknown names or if the server is unreachable.
 */
static ALCenum ALCjackPlayback_open(ALCjackPlayback *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    const char *client_name = "alsoft";
    jack_status_t status;

    if(!name)
        name = jackDevice;
    else if(strcmp(name, jackDevice) != 0)
        return ALC_INVALID_VALUE;

    self->Client = jack_client_open(client_name, ClientOptions, &status, NULL);
    if(self->Client == NULL)
    {
        ERR("jack_client_open() failed, status = 0x%02x\n", status);
        return ALC_INVALID_VALUE;
    }
    if((status&JackServerStarted))
        TRACE("JACK server started\n");
    if((status&JackNameNotUnique))
    {
        /* The server renamed us to keep client names unique. */
        client_name = jack_get_client_name(self->Client);
        TRACE("Client name not unique, got `%s' instead\n", client_name);
    }

    jack_set_process_callback(self->Client, ALCjackPlayback_process, self);
    jack_set_buffer_size_callback(self->Client, ALCjackPlayback_bufferSizeNotify, self);

    alstr_copy_cstr(&device->DeviceName, name);

    return ALC_NO_ERROR;
}
/* Reconfigures the device to match the JACK server's rate and period size,
 * forces float output, registers one port per channel (falling back to
 * stereo or mono if ports run out), and (re)allocates the ring buffer.
 */
static ALCboolean ALCjackPlayback_reset(ALCjackPlayback *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    ALsizei numchans, i;
    ALuint bufsize;

    /* Drop any ports from a previous configuration. */
    for(i = 0;i < MAX_OUTPUT_CHANNELS;i++)
    {
        if(self->Port[i])
            jack_port_unregister(self->Client, self->Port[i]);
        self->Port[i] = NULL;
    }

    /* Ignore the requested buffer metrics and just keep one JACK-sized buffer
     * ready for when requested.
     */
    device->Frequency = jack_get_sample_rate(self->Client);
    device->UpdateSize = jack_get_buffer_size(self->Client);
    device->NumUpdates = 2;

    /* Optional config override, rounded up to a power of two and at least
     * one JACK period.
     */
    bufsize = device->UpdateSize;
    if(ConfigValueUInt(alstr_get_cstr(device->DeviceName), "jack", "buffer-size", &bufsize))
        bufsize = maxu(NextPowerOf2(bufsize), device->UpdateSize);
    device->NumUpdates = (bufsize+device->UpdateSize) / device->UpdateSize;

    /* Force 32-bit float output. */
    device->FmtType = DevFmtFloat;

    numchans = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
    for(i = 0;i < numchans;i++)
    {
        char name[64];
        snprintf(name, sizeof(name), "channel_%d", i+1);
        self->Port[i] = jack_port_register(self->Client, name, JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0);
        if(self->Port[i] == NULL)
        {
            ERR("Not enough JACK ports available for %s output\n", DevFmtChannelsString(device->FmtChans));
            if(i == 0) return ALC_FALSE;
            break;
        }
    }
    /* Couldn't get ports for the full channel count: keep what mono/stereo
     * needs and release the rest.
     */
    if(i < numchans)
    {
        if(i == 1)
            device->FmtChans = DevFmtMono;
        else
        {
            for(--i;i >= 2;i--)
            {
                jack_port_unregister(self->Client, self->Port[i]);
                self->Port[i] = NULL;
            }
            device->FmtChans = DevFmtStereo;
        }
    }

    ll_ringbuffer_free(self->Ring);
    self->Ring = ll_ringbuffer_create(bufsize,
        FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder),
        true
    );
    if(!self->Ring)
    {
        ERR("Failed to allocate ringbuffer\n");
        return ALC_FALSE;
    }

    SetDefaultChannelOrder(device);

    return ALC_TRUE;
}
/* Activates the JACK client, connects our output ports to the first
 * physical playback ports found, and launches the mixer thread.
 */
static ALCboolean ALCjackPlayback_start(ALCjackPlayback *self)
{
    const char **ports;
    ALsizei i;

    if(jack_activate(self->Client))
    {
        ERR("Failed to activate client\n");
        return ALC_FALSE;
    }

    /* NULL-terminated list of physical input (i.e. playback sink) ports. */
    ports = jack_get_ports(self->Client, NULL, NULL, JackPortIsPhysical|JackPortIsInput);
    if(ports == NULL)
    {
        ERR("No physical playback ports found\n");
        jack_deactivate(self->Client);
        return ALC_FALSE;
    }
    for(i = 0;i < MAX_OUTPUT_CHANNELS && self->Port[i];i++)
    {
        if(!ports[i])
        {
            ERR("No physical playback port for \"%s\"\n", jack_port_name(self->Port[i]));
            break;
        }
        /* A failed connection is logged but not fatal. */
        if(jack_connect(self->Client, jack_port_name(self->Port[i]), ports[i]))
            ERR("Failed to connect output port \"%s\" to \"%s\"\n", jack_port_name(self->Port[i]), ports[i]);
    }
    jack_free(ports);

    ATOMIC_STORE(&self->killNow, AL_FALSE, almemory_order_release);
    if(althrd_create(&self->thread, ALCjackPlayback_mixerProc, self) != althrd_success)
    {
        jack_deactivate(self->Client);
        return ALC_FALSE;
    }

    return ALC_TRUE;
}
/* Stops playback: flags the mixer thread to exit, wakes it, joins it, and
 * deactivates the JACK client. Safe to call when already stopped.
 */
static void ALCjackPlayback_stop(ALCjackPlayback *self)
{
    int thread_res;

    /* If the flag was already set, the mixer thread isn't running. */
    if(ATOMIC_EXCHANGE(&self->killNow, AL_TRUE, almemory_order_acq_rel))
        return;

    /* Wake the thread in case it's blocked waiting for write space. */
    alsem_post(&self->Sem);
    althrd_join(self->thread, &thread_res);

    jack_deactivate(self->Client);
}
/* Reports the device clock time together with the current output latency,
 * estimated from the amount of mixed audio still sitting in the ring buffer.
 */
static ClockLatency ALCjackPlayback_getClockLatency(ALCjackPlayback *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    ClockLatency latency;

    ALCjackPlayback_lock(self);
    latency.ClockTime = GetDeviceClockTime(device);
    /* Buffered samples converted to clock ticks at the device rate. */
    latency.Latency = ll_ringbuffer_read_space(self->Ring) * DEVICE_CLOCK_RES / device->Frequency;
    ALCjackPlayback_unlock(self);

    return latency;
}
/* Routes JACK's error messages through our logging as warnings. */
static void jack_msg_handler(const char *message)
{
    WARN("%s\n", message);
}

/* Factory object exposing the JACK backend to the backend list. */
typedef struct ALCjackBackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCjackBackendFactory;
#define ALCJACKBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCjackBackendFactory, ALCbackendFactory) } }
/* Loads the JACK library and probes for a reachable server, temporarily
 * swapping in our message handler so probe errors are logged quietly.
 */
static ALCboolean ALCjackBackendFactory_init(ALCjackBackendFactory* UNUSED(self))
{
    void (*old_error_cb)(const char*);
    jack_client_t *client;
    jack_status_t status;

    if(!jack_load())
        return ALC_FALSE;

    if(!GetConfigValueBool(NULL, "jack", "spawn-server", 0))
        ClientOptions |= JackNoStartServer;

    /* jack_error_callback may be a weak/absent symbol in some builds; only
     * read it if its address is non-null.
     */
    old_error_cb = (&jack_error_callback ? jack_error_callback : NULL);
    jack_set_error_function(jack_msg_handler);
    client = jack_client_open("alsoft", ClientOptions, &status, NULL);
    jack_set_error_function(old_error_cb);
    if(client == NULL)
    {
        WARN("jack_client_open() failed, 0x%02x\n", status);
        if((status&JackServerFailed) && !(ClientOptions&JackNoStartServer))
            ERR("Unable to connect to JACK server\n");
        return ALC_FALSE;
    }

    jack_client_close(client);
    return ALC_TRUE;
}
/* Unloads the dynamically-loaded JACK library, if it was loaded. */
static void ALCjackBackendFactory_deinit(ALCjackBackendFactory* UNUSED(self))
{
#ifdef HAVE_DYNLOAD
    if(jack_handle)
        CloseLib(jack_handle);
    jack_handle = NULL;
#endif
}
/* Only playback is supported; JACK capture is not implemented. */
static ALCboolean ALCjackBackendFactory_querySupport(ALCjackBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    return (type == ALCbackend_Playback) ? ALC_TRUE : ALC_FALSE;
}
/* Enumerates available devices: exactly one playback device name is
 * exposed; capture probing is a no-op.
 */
static void ALCjackBackendFactory_probe(ALCjackBackendFactory* UNUSED(self), enum DevProbe type)
{
    if(type == ALL_DEVICE_PROBE)
        AppendAllDevicesList(jackDevice);
}
/* Instantiates a JACK playback backend, or returns NULL for unsupported
 * backend types (or on allocation failure).
 */
static ALCbackend* ALCjackBackendFactory_createBackend(ALCjackBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    ALCjackPlayback *backend = NULL;

    if(type != ALCbackend_Playback)
        return NULL;

    NEW_OBJ(backend, ALCjackPlayback)(device);
    if(!backend)
        return NULL;
    return STATIC_CAST(ALCbackend, backend);
}
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCjackBackendFactory);

/* Returns the process-wide singleton JACK backend factory. */
ALCbackendFactory *ALCjackBackendFactory_getFactory(void)
{
    static ALCjackBackendFactory factory = ALCJACKBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &factory);
}

View file

@ -0,0 +1,601 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/jack.h"
#include <cstdlib>
#include <cstdio>
#include <cstring>
#include <memory.h>
#include <array>
#include <thread>
#include <functional>
#include "alcmain.h"
#include "alu.h"
#include "alconfig.h"
#include "core/logging.h"
#include "dynload.h"
#include "ringbuffer.h"
#include "threads.h"
#include <jack/jack.h>
#include <jack/ringbuffer.h>
namespace {

/* The single device name exposed for the default JACK connection. */
constexpr char jackDevice[] = "JACK Default";

#ifdef HAVE_DYNLOAD
/* X-macro listing every JACK entry point resolved at runtime. */
#define JACK_FUNCS(MAGIC)          \
    MAGIC(jack_client_open);       \
    MAGIC(jack_client_close);      \
    MAGIC(jack_client_name_size);  \
    MAGIC(jack_get_client_name);   \
    MAGIC(jack_connect);           \
    MAGIC(jack_activate);          \
    MAGIC(jack_deactivate);        \
    MAGIC(jack_port_register);     \
    MAGIC(jack_port_unregister);   \
    MAGIC(jack_port_get_buffer);   \
    MAGIC(jack_port_name);         \
    MAGIC(jack_get_ports);         \
    MAGIC(jack_free);              \
    MAGIC(jack_get_sample_rate);   \
    MAGIC(jack_set_error_function); \
    MAGIC(jack_set_process_callback); \
    MAGIC(jack_set_buffer_size_callback); \
    MAGIC(jack_set_buffer_size);   \
    MAGIC(jack_get_buffer_size);

/* Handle of the dynamically-loaded libjack (null until jack_load()). */
void *jack_handle;

/* Declare one function pointer per listed entry point. */
#define MAKE_FUNC(f) decltype(f) * p##f
JACK_FUNCS(MAKE_FUNC)
/* Resolved separately; may be absent in older JACK versions. */
decltype(jack_error_callback) * pjack_error_callback;
#undef MAKE_FUNC

/* Redirect direct JACK calls through the resolved pointers. */
#ifndef IN_IDE_PARSER
#define jack_client_open pjack_client_open
#define jack_client_close pjack_client_close
#define jack_client_name_size pjack_client_name_size
#define jack_get_client_name pjack_get_client_name
#define jack_connect pjack_connect
#define jack_activate pjack_activate
#define jack_deactivate pjack_deactivate
#define jack_port_register pjack_port_register
#define jack_port_unregister pjack_port_unregister
#define jack_port_get_buffer pjack_port_get_buffer
#define jack_port_name pjack_port_name
#define jack_get_ports pjack_get_ports
#define jack_free pjack_free
#define jack_get_sample_rate pjack_get_sample_rate
#define jack_set_error_function pjack_set_error_function
#define jack_set_process_callback pjack_set_process_callback
#define jack_set_buffer_size_callback pjack_set_buffer_size_callback
#define jack_set_buffer_size pjack_set_buffer_size
#define jack_get_buffer_size pjack_get_buffer_size
#define jack_error_callback (*pjack_error_callback)
#endif
#endif

/* Options used for every jack_client_open() call. */
jack_options_t ClientOptions = JackNullOption;
/* Loads libjack and resolves all required symbols. Returns true on success
 * (or when dynamic loading is disabled at build time). Idempotent: a second
 * call with the library already loaded succeeds immediately.
 */
bool jack_load()
{
    bool error{false};

#ifdef HAVE_DYNLOAD
    if(!jack_handle)
    {
        std::string missing_funcs;

#ifdef _WIN32
#define JACKLIB "libjack.dll"
#else
#define JACKLIB "libjack.so.0"
#endif
        jack_handle = LoadLib(JACKLIB);
        if(!jack_handle)
        {
            WARN("Failed to load %s\n", JACKLIB);
            return false;
        }

        /* Resolve every required symbol; collect any that are missing so
         * they can be reported together.
         */
        error = false;
#define LOAD_FUNC(f) do {                                                     \
    p##f = reinterpret_cast<decltype(p##f)>(GetSymbol(jack_handle, #f));      \
    if(p##f == nullptr) {                                                     \
        error = true;                                                         \
        missing_funcs += "\n" #f;                                             \
    }                                                                         \
} while(0)
        JACK_FUNCS(LOAD_FUNC);
#undef LOAD_FUNC
        /* Optional symbols. These don't exist in all versions of JACK. */
#define LOAD_SYM(f) p##f = reinterpret_cast<decltype(p##f)>(GetSymbol(jack_handle, #f))
        LOAD_SYM(jack_error_callback);
#undef LOAD_SYM

        if(error)
        {
            WARN("Missing expected functions:%s\n", missing_funcs.c_str());
            CloseLib(jack_handle);
            jack_handle = nullptr;
        }
    }
#endif

    return !error;
}
/* A user-visible device name paired with the JACK port-name pattern used to
 * select which physical ports it connects to.
 */
struct DeviceEntry {
    std::string mName;
    std::string mPattern;
};

/* Cached result of the last playback-device enumeration. */
al::vector<DeviceEntry> PlaybackList;
/* Rebuilds the device list: the default device plus any user-defined
 * entries from the "jack/custom-devices" config value, which is a
 * ';'-separated list of "name=port-pattern" pairs. Duplicate names get a
 * " #N" suffix; malformed entries are logged and skipped.
 */
void EnumerateDevices(al::vector<DeviceEntry> &list)
{
    al::vector<DeviceEntry>{}.swap(list);
    list.emplace_back(DeviceEntry{jackDevice, ""});

    std::string customList{ConfigValueStr(nullptr, "jack", "custom-devices").value_or("")};
    size_t strpos{0};
    while(strpos < customList.size())
    {
        size_t nextpos{customList.find(';', strpos)};
        size_t seppos{customList.find('=', strpos)};
        /* Entry has no '=' before the next ';', or an empty name. */
        if(seppos >= nextpos || seppos == strpos)
        {
            const std::string entry{customList.substr(strpos, nextpos-strpos)};
            ERR("Invalid device entry: \"%s\"\n", entry.c_str());
            if(nextpos != std::string::npos) ++nextpos;
            strpos = nextpos;
            continue;
        }

        /* Disambiguate repeated names with an increasing " #N" suffix. */
        size_t count{1};
        std::string name{customList.substr(strpos, seppos-strpos)};
        auto check_name = [&name](const DeviceEntry &entry) -> bool
        { return entry.mName == name; };
        while(std::find_if(list.cbegin(), list.cend(), check_name) != list.cend())
        {
            name = customList.substr(strpos, seppos-strpos);
            name += " #";
            name += std::to_string(++count);
        }

        ++seppos;
        list.emplace_back(DeviceEntry{std::move(name), customList.substr(seppos, nextpos-seppos)});
        const auto &entry = list.back();
        TRACE("Got custom device: %s = %s\n", entry.mName.c_str(), entry.mPattern.c_str());

        if(nextpos != std::string::npos) ++nextpos;
        strpos = nextpos;
    }
}
/* JACK playback backend. Mixes into a ring buffer on a dedicated thread;
 * the JACK process callback drains it into the per-channel port buffers.
 */
struct JackPlayback final : public BackendBase {
    JackPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~JackPlayback() override;

    /* Realtime process callback (member + C-style trampoline for JACK). */
    int process(jack_nframes_t numframes) noexcept;
    static int processC(jack_nframes_t numframes, void *arg) noexcept
    { return static_cast<JackPlayback*>(arg)->process(numframes); }

    /* Mixer thread entry point. */
    int mixerProc();

    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
    ClockLatency getClockLatency() override;

    std::string mPortPattern; /* Port-name filter for auto-connection; empty = any physical port. */

    jack_client_t *mClient{nullptr};
    std::array<jack_port_t*,MAX_OUTPUT_CHANNELS> mPort{};

    std::mutex mMutex; /* Guards mixing state against getClockLatency(). */

    std::atomic<bool> mPlaying{false}; /* True while the mixer thread feeds the ring buffer. */
    RingBufferPtr mRing;
    al::semaphore mSem; /* Posted by process() to wake the mixer thread. */

    std::atomic<bool> mKillNow{true}; /* Shutdown request flag for the mixer thread. */
    std::thread mThread;

    DEF_NEWDEL(JackPlayback)
};
/* Unregisters all ports and closes the client connection, if one is open. */
JackPlayback::~JackPlayback()
{
    if(!mClient)
        return;

    /* Ports must be released before the client is closed. */
    for(jack_port_t *port : mPort)
    {
        if(port)
            jack_port_unregister(mClient, port);
    }
    mPort.fill(nullptr);

    jack_client_close(mClient);
    mClient = nullptr;
}
/* JACK realtime process callback. While playing, deinterleaves frames from
 * the ring buffer into each port's buffer and wakes the mixer thread; any
 * shortfall (or non-playing state) is filled with silence. Must not block.
 */
int JackPlayback::process(jack_nframes_t numframes) noexcept
{
    /* Gather the output buffer for each registered port. */
    std::array<jack_default_audio_sample_t*,MAX_OUTPUT_CHANNELS> out;
    size_t numchans{0};
    for(auto port : mPort)
    {
        if(!port) break;
        out[numchans++] = static_cast<float*>(jack_port_get_buffer(port, numframes));
    }

    jack_nframes_t total{0};
    if LIKELY(mPlaying.load(std::memory_order_acquire))
    {
        /* The readable region may wrap, giving two segments. */
        auto data = mRing->getReadVector();
        jack_nframes_t todo{minu(numframes, static_cast<uint>(data.first.len))};

        /* Copies channel c of the interleaved first segment into outbuf,
         * returning the advanced output pointer. Also bumps data.first.buf
         * by one float so the next channel starts at its own offset.
         */
        auto write_first = [&data,numchans,todo](float *outbuf) -> float*
        {
            const float *RESTRICT in = reinterpret_cast<float*>(data.first.buf);
            auto deinterlace_input = [&in,numchans]() noexcept -> float
            {
                float ret{*in};
                in += numchans;
                return ret;
            };

            std::generate_n(outbuf, todo, deinterlace_input);
            data.first.buf += sizeof(float);
            return outbuf + todo;
        };
        std::transform(out.begin(), out.begin()+numchans, out.begin(), write_first);
        total += todo;

        /* Second (wrapped) segment, if more frames are still needed. */
        todo = minu(numframes-total, static_cast<uint>(data.second.len));
        if(todo > 0)
        {
            auto write_second = [&data,numchans,todo](float *outbuf) -> float*
            {
                const float *RESTRICT in = reinterpret_cast<float*>(data.second.buf);
                auto deinterlace_input = [&in,numchans]() noexcept -> float
                {
                    float ret{*in};
                    in += numchans;
                    return ret;
                };

                std::generate_n(outbuf, todo, deinterlace_input);
                data.second.buf += sizeof(float);
                return outbuf + todo;
            };
            std::transform(out.begin(), out.begin()+numchans, out.begin(), write_second);
            total += todo;
        }

        mRing->readAdvance(total);
        /* Signal the mixer thread that write space is available. */
        mSem.post();
    }

    /* Underrun or stopped: fill the remainder with silence. */
    if(numframes > total)
    {
        const jack_nframes_t todo{numframes - total};
        auto clear_buf = [todo](float *outbuf) -> void { std::fill_n(outbuf, todo, 0.0f); };
        std::for_each(out.begin(), out.begin()+numchans, clear_buf);
    }

    return 0;
}
/* Mixer thread: renders audio into the ring buffer in UpdateSize-multiples,
 * sleeping on the semaphore whenever the buffer is full. mMutex is held
 * only while rendering, so getClockLatency() stays responsive.
 */
int JackPlayback::mixerProc()
{
    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);

    const size_t frame_step{mDevice->channelsFromFmt()};

    while(!mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        /* Wait for the process callback to free up a full update's worth
         * of space before mixing.
         */
        if(mRing->writeSpace() < mDevice->UpdateSize)
        {
            mSem.wait();
            continue;
        }

        /* Mix whole updates only, possibly split across the wrap point. */
        auto data = mRing->getWriteVector();
        size_t todo{data.first.len + data.second.len};
        todo -= todo%mDevice->UpdateSize;

        const auto len1 = static_cast<uint>(minz(data.first.len, todo));
        const auto len2 = static_cast<uint>(minz(data.second.len, todo-len1));

        std::lock_guard<std::mutex> _{mMutex};
        mDevice->renderSamples(data.first.buf, len1, frame_step);
        if(len2 > 0)
            mDevice->renderSamples(data.second.buf, len2, frame_step);
        mRing->writeAdvance(todo);
    }

    return 0;
}
/* Resolves the requested device name (default or a custom entry, whose port
 * pattern is remembered for start()), opens the JACK client connection, and
 * installs the process callback. Throws backend_exception on failure.
 */
void JackPlayback::open(const char *name)
{
    mPortPattern.clear();

    if(!name)
        name = jackDevice;
    else if(strcmp(name, jackDevice) != 0)
    {
        if(PlaybackList.empty())
            EnumerateDevices(PlaybackList);

        auto check_name = [name](const DeviceEntry &entry) -> bool
        { return entry.mName == name; };
        auto iter = std::find_if(PlaybackList.cbegin(), PlaybackList.cend(), check_name);
        if(iter == PlaybackList.cend())
            throw al::backend_exception{al::backend_error::NoDevice,
                "Device name \"%s\" not found", name};
        mPortPattern = iter->mPattern;
    }

    const char *client_name{"alsoft"};
    jack_status_t status;
    mClient = jack_client_open(client_name, ClientOptions, &status, nullptr);
    if(mClient == nullptr)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to open client connection: 0x%02x", status};

    if((status&JackServerStarted))
        TRACE("JACK server started\n");
    if((status&JackNameNotUnique))
    {
        /* The server renamed us to keep client names unique. */
        client_name = jack_get_client_name(mClient);
        TRACE("Client name not unique, got '%s' instead\n", client_name);
    }

    jack_set_process_callback(mClient, &JackPlayback::processC, this);

    mDevice->DeviceName = name;
}
/* Reconfigures the device to the JACK server's rate and period size, forces
 * float output, and registers one port per channel, dropping to stereo or
 * mono if ports run out. Returns false only if no ports at all could be
 * registered.
 */
bool JackPlayback::reset()
{
    /* Drop any ports from a previous configuration. */
    std::for_each(mPort.begin(), mPort.end(),
        [this](jack_port_t *port) -> void
        { if(port) jack_port_unregister(mClient, port); });
    mPort.fill(nullptr);

    /* Ignore the requested buffer metrics and just keep one JACK-sized buffer
     * ready for when requested.
     */
    mDevice->Frequency = jack_get_sample_rate(mClient);
    mDevice->UpdateSize = jack_get_buffer_size(mClient);
    mDevice->BufferSize = mDevice->UpdateSize * 2;

    /* Optional config override, rounded up to a power of two and at least
     * one JACK period.
     */
    const char *devname{mDevice->DeviceName.c_str()};
    uint bufsize{ConfigValueUInt(devname, "jack", "buffer-size").value_or(mDevice->UpdateSize)};
    bufsize = maxu(NextPowerOf2(bufsize), mDevice->UpdateSize);
    mDevice->BufferSize = bufsize + mDevice->UpdateSize;

    /* Force 32-bit float output. */
    mDevice->FmtType = DevFmtFloat;

    /* Register "channel_N" ports until done or registration fails. */
    auto ports_end = mPort.begin() + mDevice->channelsFromFmt();
    auto bad_port = std::find_if_not(mPort.begin(), ports_end,
        [this](jack_port_t *&port) -> bool
        {
            std::string name{"channel_" + std::to_string(&port - &mPort[0] + 1)};
            port = jack_port_register(mClient, name.c_str(), JACK_DEFAULT_AUDIO_TYPE,
                JackPortIsOutput, 0);
            return port != nullptr;
        });
    if(bad_port != ports_end)
    {
        ERR("Not enough JACK ports available for %s output\n", DevFmtChannelsString(mDevice->FmtChans));
        if(bad_port == mPort.begin()) return false;

        /* Keep what mono/stereo needs and release the rest. */
        if(bad_port == mPort.begin()+1)
            mDevice->FmtChans = DevFmtMono;
        else
        {
            ports_end = mPort.begin()+2;
            while(bad_port != ports_end)
            {
                jack_port_unregister(mClient, *(--bad_port));
                *bad_port = nullptr;
            }
            mDevice->FmtChans = DevFmtStereo;
        }
    }

    setDefaultChannelOrder();

    return true;
}
/* Activates the client, optionally auto-connects our ports to physical
 * playback ports matching mPortPattern, re-reads the server's buffer
 * metrics, allocates the ring buffer, and launches the mixer thread.
 * Throws backend_exception on any failure (deactivating the client first).
 */
void JackPlayback::start()
{
    if(jack_activate(mClient))
        throw al::backend_exception{al::backend_error::DeviceError, "Failed to activate client"};

    const char *devname{mDevice->DeviceName.c_str()};
    if(ConfigValueBool(devname, "jack", "connect-ports").value_or(true))
    {
        /* NULL-terminated list of matching physical sink ports. */
        const char **ports{jack_get_ports(mClient, mPortPattern.c_str(), nullptr,
            JackPortIsPhysical|JackPortIsInput)};
        if(ports == nullptr)
        {
            jack_deactivate(mClient);
            throw al::backend_exception{al::backend_error::DeviceError,
                "No physical playback ports found"};
        }
        /* Pair our ports with the physical ones in order; a failed connect
         * is logged but not fatal.
         */
        auto connect_port = [this](const jack_port_t *port, const char *pname) -> bool
        {
            if(!port) return false;
            if(!pname)
            {
                ERR("No physical playback port for \"%s\"\n", jack_port_name(port));
                return false;
            }
            if(jack_connect(mClient, jack_port_name(port), pname))
                ERR("Failed to connect output port \"%s\" to \"%s\"\n", jack_port_name(port),
                    pname);
            return true;
        };
        std::mismatch(mPort.begin(), mPort.end(), ports, connect_port);
        jack_free(ports);
    }

    /* Reconfigure buffer metrics in case the server changed it since the reset
     * (it won't change again after jack_activate), then allocate the ring
     * buffer with the appropriate size.
     */
    mDevice->Frequency = jack_get_sample_rate(mClient);
    mDevice->UpdateSize = jack_get_buffer_size(mClient);
    mDevice->BufferSize = mDevice->UpdateSize * 2;

    uint bufsize{ConfigValueUInt(devname, "jack", "buffer-size").value_or(mDevice->UpdateSize)};
    bufsize = maxu(NextPowerOf2(bufsize), mDevice->UpdateSize);
    mDevice->BufferSize = bufsize + mDevice->UpdateSize;

    mRing = nullptr;
    mRing = RingBuffer::Create(bufsize, mDevice->frameSizeFromFmt(), true);

    try {
        mPlaying.store(true, std::memory_order_release);
        mKillNow.store(false, std::memory_order_release);
        mThread = std::thread{std::mem_fn(&JackPlayback::mixerProc), this};
    }
    catch(std::exception& e) {
        jack_deactivate(mClient);
        mPlaying.store(false, std::memory_order_release);
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start mixing thread: %s", e.what()};
    }
}
/* Stops playback: flags the mixer thread to exit, wakes it, joins it, and
 * deactivates the JACK client. Safe to call when already stopped.
 */
void JackPlayback::stop()
{
    const bool wasKilled{mKillNow.exchange(true, std::memory_order_acq_rel)};
    if(wasKilled || !mThread.joinable())
        return;

    /* Wake the thread in case it's blocked waiting for write space. */
    mSem.post();
    mThread.join();

    jack_deactivate(mClient);
    mPlaying.store(false, std::memory_order_release);
}
/* Reports the device clock time together with the current output latency,
 * estimated from the amount of mixed audio still sitting in the ring buffer.
 */
ClockLatency JackPlayback::getClockLatency()
{
    std::lock_guard<std::mutex> _{mMutex};

    ClockLatency ret;
    ret.ClockTime = GetDeviceClockTime(mDevice);
    /* Buffered frames as a duration at the device sample rate. */
    ret.Latency = std::chrono::seconds{mRing->readSpace()};
    ret.Latency /= mDevice->Frequency;
    return ret;
}
/* Routes JACK's error messages through our logging as warnings. */
void jack_msg_handler(const char *message)
{
    WARN("%s\n", message);
}
} // namespace
/* Loads the JACK library and probes for a reachable server, temporarily
 * swapping in our message handler so probe errors are logged quietly.
 */
bool JackBackendFactory::init()
{
    if(!jack_load())
        return false;

    if(!GetConfigValueBool(nullptr, "jack", "spawn-server", 0))
        ClientOptions = static_cast<jack_options_t>(ClientOptions | JackNoStartServer);

    /* jack_error_callback may be a weak/absent symbol in some builds; only
     * read it if its address is non-null.
     */
    void (*old_error_cb)(const char*){&jack_error_callback ? jack_error_callback : nullptr};
    jack_set_error_function(jack_msg_handler);

    jack_status_t status;
    jack_client_t *client{jack_client_open("alsoft", ClientOptions, &status, nullptr)};
    jack_set_error_function(old_error_cb);
    if(!client)
    {
        WARN("jack_client_open() failed, 0x%02x\n", status);
        if((status&JackServerFailed) && !(ClientOptions&JackNoStartServer))
            ERR("Unable to connect to JACK server\n");
        return false;
    }

    jack_client_close(client);
    return true;
}
/* Only playback is supported; JACK capture is not implemented. */
bool JackBackendFactory::querySupport(BackendType type)
{
    return type == BackendType::Playback;
}
/* Returns the playback device names as a sequence of null-terminated
 * strings (empty for capture, which is unsupported).
 */
std::string JackBackendFactory::probe(BackendType type)
{
    std::string outnames;

    if(type == BackendType::Playback)
    {
        EnumerateDevices(PlaybackList);
        for(const DeviceEntry &entry : PlaybackList)
        {
            /* Includes null char. */
            outnames.append(entry.mName.c_str(), entry.mName.length()+1);
        }
    }

    return outnames;
}
/* Instantiates a JACK playback backend; other types yield nullptr. */
BackendPtr JackBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type != BackendType::Playback)
        return nullptr;
    return BackendPtr{new JackPlayback{device}};
}
/* Returns the process-wide singleton JACK backend factory. */
BackendFactory &JackBackendFactory::getFactory()
{
    static JackBackendFactory singleton{};
    return singleton;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_JACK_H
#define BACKENDS_JACK_H

#include "backends/base.h"

/* Factory exposing the JACK playback backend to the backend list. */
struct JackBackendFactory final : public BackendFactory {
public:
    bool init() override;

    bool querySupport(BackendType type) override;

    std::string probe(BackendType type) override;

    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Process-wide singleton accessor. */
    static BackendFactory &getFactory();
};

#endif /* BACKENDS_JACK_H */

View file

@ -1,128 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2011 by Chris Robinson
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#include "alMain.h"
#include "alu.h"
#include "backends/base.h"
/* Loopback backend: renders only when the application asks via
 * alcRenderSamplesSOFT, so it carries no state beyond the base backend.
 */
typedef struct ALCloopback {
    DERIVE_FROM_TYPE(ALCbackend);
} ALCloopback;

static void ALCloopback_Construct(ALCloopback *self, ALCdevice *device);
static DECLARE_FORWARD(ALCloopback, ALCbackend, void, Destruct)
static ALCenum ALCloopback_open(ALCloopback *self, const ALCchar *name);
static ALCboolean ALCloopback_reset(ALCloopback *self);
static ALCboolean ALCloopback_start(ALCloopback *self);
static void ALCloopback_stop(ALCloopback *self);
static DECLARE_FORWARD2(ALCloopback, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCloopback, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCloopback, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCloopback, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCloopback, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCloopback)
DEFINE_ALCBACKEND_VTABLE(ALCloopback);
/* Base construction plus vtable hookup; no backend-specific state. */
static void ALCloopback_Construct(ALCloopback *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCloopback, ALCbackend, self);
}
/* "Opens" the loopback device: just records the requested name. */
static ALCenum ALCloopback_open(ALCloopback *self, const ALCchar *name)
{
    ALCdevice *dev = STATIC_CAST(ALCbackend, self)->mDevice;

    alstr_copy_cstr(&dev->DeviceName, name);
    return ALC_NO_ERROR;
}
/* Applies the standard WFX channel ordering; nothing else to configure. */
static ALCboolean ALCloopback_reset(ALCloopback *self)
{
    ALCdevice *dev = STATIC_CAST(ALCbackend, self)->mDevice;

    SetDefaultWFXChannelOrder(dev);
    return ALC_TRUE;
}
/* Start/stop are no-ops: rendering happens on demand via the loopback API. */
static ALCboolean ALCloopback_start(ALCloopback* UNUSED(self))
{
    return ALC_TRUE;
}

static void ALCloopback_stop(ALCloopback* UNUSED(self))
{
}
/* Factory object exposing the loopback backend. */
typedef struct ALCloopbackFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCloopbackFactory;
#define ALCNULLBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCloopbackFactory, ALCbackendFactory) } }

ALCbackendFactory *ALCloopbackFactory_getFactory(void);

static ALCboolean ALCloopbackFactory_init(ALCloopbackFactory *self);
static DECLARE_FORWARD(ALCloopbackFactory, ALCbackendFactory, void, deinit)
static ALCboolean ALCloopbackFactory_querySupport(ALCloopbackFactory *self, ALCbackend_Type type);
static void ALCloopbackFactory_probe(ALCloopbackFactory *self, enum DevProbe type);
static ALCbackend* ALCloopbackFactory_createBackend(ALCloopbackFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCloopbackFactory);

/* Returns the process-wide singleton loopback backend factory. */
ALCbackendFactory *ALCloopbackFactory_getFactory(void)
{
    static ALCloopbackFactory factory = ALCNULLBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &factory);
}
/* Loopback needs no system resources; initialization always succeeds. */
static ALCboolean ALCloopbackFactory_init(ALCloopbackFactory* UNUSED(self))
{
    return ALC_TRUE;
}
/* Only the loopback backend type is handled by this factory. */
static ALCboolean ALCloopbackFactory_querySupport(ALCloopbackFactory* UNUSED(self), ALCbackend_Type type)
{
    return (type == ALCbackend_Loopback) ? ALC_TRUE : ALC_FALSE;
}
/* Loopback devices aren't enumerable; probing is a no-op. */
static void ALCloopbackFactory_probe(ALCloopbackFactory* UNUSED(self), enum DevProbe UNUSED(type))
{
}
/* Instantiates a loopback backend, or returns NULL for unsupported types
 * (or on allocation failure).
 */
static ALCbackend* ALCloopbackFactory_createBackend(ALCloopbackFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    ALCloopback *backend = NULL;

    if(type != ALCbackend_Loopback)
        return NULL;

    NEW_OBJ(backend, ALCloopback)(device);
    if(!backend)
        return NULL;
    return STATIC_CAST(ALCbackend, backend);
}

View file

@ -0,0 +1,79 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2011 by Chris Robinson
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/loopback.h"
#include "alcmain.h"
#include "alu.h"
namespace {
/* Loopback backend: renders only when the application asks via
 * alcRenderSamplesSOFT, so it carries no state beyond the base backend.
 */
struct LoopbackBackend final : public BackendBase {
    LoopbackBackend(ALCdevice *device) noexcept : BackendBase{device} { }

    void open(const ALCchar *name) override;
    bool reset() override;
    void start() override;
    void stop() override;

    DEF_NEWDEL(LoopbackBackend)
};
/* "Opens" the loopback device: just records the requested name. */
void LoopbackBackend::open(const ALCchar *name)
{
    mDevice->DeviceName = name;
}

/* Applies the standard WFX channel ordering; nothing else to configure. */
bool LoopbackBackend::reset()
{
    setDefaultWFXChannelOrder();
    return true;
}

/* Start/stop are no-ops: rendering happens on demand via the loopback API. */
void LoopbackBackend::start()
{ }

void LoopbackBackend::stop()
{ }
} // namespace
/* Loopback needs no system resources; initialization always succeeds. */
bool LoopbackBackendFactory::init()
{ return true; }

/* The loopback backend accepts any requested backend type. */
bool LoopbackBackendFactory::querySupport(BackendType)
{ return true; }

/* Loopback devices aren't enumerable; no names to report. */
std::string LoopbackBackendFactory::probe(BackendType)
{ return std::string{}; }

BackendPtr LoopbackBackendFactory::createBackend(ALCdevice *device, BackendType)
{ return BackendPtr{new LoopbackBackend{device}}; }

/* Returns the process-wide singleton loopback backend factory. */
BackendFactory &LoopbackBackendFactory::getFactory()
{
    static LoopbackBackendFactory factory{};
    return factory;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_LOOPBACK_H
#define BACKENDS_LOOPBACK_H

#include "backends/base.h"

/* Factory exposing the loopback (render-on-demand) backend. */
struct LoopbackBackendFactory final : public BackendFactory {
public:
    bool init() override;

    bool querySupport(BackendType type) override;

    std::string probe(BackendType type) override;

    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Process-wide singleton accessor. */
    static BackendFactory &getFactory();
};

#endif /* BACKENDS_LOOPBACK_H */

View file

@ -1,221 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2010 by Chris Robinson
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#ifdef HAVE_WINDOWS_H
#include <windows.h>
#endif
#include "alMain.h"
#include "alu.h"
#include "threads.h"
#include "compat.h"
#include "backends/base.h"
/* Legacy C implementation of the null (no-output) playback backend, built on
 * the old ALCbackend vtable macros.
 */
typedef struct ALCnullBackend {
    DERIVE_FROM_TYPE(ALCbackend);
    /* Non-zero requests the mixer thread to exit. */
    ATOMIC(int) killNow;
    althrd_t thread;
} ALCnullBackend;
/* Vtable entries; operations not implemented here forward to the ALCbackend
 * defaults via the DECLARE_FORWARD macros.
 */
static int ALCnullBackend_mixerProc(void *ptr);
static void ALCnullBackend_Construct(ALCnullBackend *self, ALCdevice *device);
static DECLARE_FORWARD(ALCnullBackend, ALCbackend, void, Destruct)
static ALCenum ALCnullBackend_open(ALCnullBackend *self, const ALCchar *name);
static ALCboolean ALCnullBackend_reset(ALCnullBackend *self);
static ALCboolean ALCnullBackend_start(ALCnullBackend *self);
static void ALCnullBackend_stop(ALCnullBackend *self);
static DECLARE_FORWARD2(ALCnullBackend, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCnullBackend, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCnullBackend, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCnullBackend, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCnullBackend, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCnullBackend)
DEFINE_ALCBACKEND_VTABLE(ALCnullBackend);
/* The only device name this backend reports or accepts. */
static const ALCchar nullDevice[] = "No Output";
static void ALCnullBackend_Construct(ALCnullBackend *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCnullBackend, ALCbackend, self);
    /* Start in the "killed" state; start() clears it before the thread runs. */
    ATOMIC_INIT(&self->killNow, AL_TRUE);
}
/* Mixer thread: paces mixing against the wall clock so samples are consumed
 * at the device's configured rate without producing any real output.
 * Returns 0 on normal exit, 1 if the system clock could not be read.
 */
static int ALCnullBackend_mixerProc(void *ptr)
{
    ALCnullBackend *self = (ALCnullBackend*)ptr;
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    struct timespec now, start;
    ALuint64 avail, done;
    /* Half of an update period, in nanoseconds: the sleep interval used when
     * mixing is ahead of the clock.
     */
    const long restTime = (long)((ALuint64)device->UpdateSize * 1000000000 /
        device->Frequency / 2);
    SetRTPriority();
    althrd_setname(althrd_current(), MIXER_THREAD_NAME);
    done = 0;
    if(altimespec_get(&start, AL_TIME_UTC) != AL_TIME_UTC)
    {
        ERR("Failed to get starting time\n");
        return 1;
    }
    while(!ATOMIC_LOAD(&self->killNow, almemory_order_acquire) &&
        ATOMIC_LOAD(&device->Connected, almemory_order_acquire))
    {
        if(altimespec_get(&now, AL_TIME_UTC) != AL_TIME_UTC)
        {
            ERR("Failed to get current time\n");
            return 1;
        }
        /* Total samples the clock says should have been mixed by now. */
        avail = (now.tv_sec - start.tv_sec) * device->Frequency;
        avail += (ALint64)(now.tv_nsec - start.tv_nsec) * device->Frequency / 1000000000;
        if(avail < done)
        {
            /* Oops, time skipped backwards. Reset the number of samples done
             * with one update available since we (likely) just came back from
             * sleeping. */
            done = avail - device->UpdateSize;
        }
        if(avail-done < device->UpdateSize)
            al_nssleep(restTime);
        else while(avail-done >= device->UpdateSize)
        {
            /* Mix an update's worth of samples into the bit bucket. */
            ALCnullBackend_lock(self);
            aluMixData(device, NULL, device->UpdateSize);
            ALCnullBackend_unlock(self);
            done += device->UpdateSize;
        }
    }
    return 0;
}
/* Open the device: only the "No Output" name (or NULL) is accepted. */
static ALCenum ALCnullBackend_open(ALCnullBackend *self, const ALCchar *name)
{
    ALCdevice *device;
    if(!name)
        name = nullDevice;
    else if(strcmp(name, nullDevice) != 0)
        return ALC_INVALID_VALUE;
    device = STATIC_CAST(ALCbackend, self)->mDevice;
    alstr_copy_cstr(&device->DeviceName, name);
    return ALC_NO_ERROR;
}
/* Any format is accepted; only the channel ordering needs fixing up. */
static ALCboolean ALCnullBackend_reset(ALCnullBackend *self)
{
    SetDefaultWFXChannelOrder(STATIC_CAST(ALCbackend, self)->mDevice);
    return ALC_TRUE;
}
/* Clear the kill flag and launch the mixer thread. */
static ALCboolean ALCnullBackend_start(ALCnullBackend *self)
{
    ATOMIC_STORE(&self->killNow, AL_FALSE, almemory_order_release);
    if(althrd_create(&self->thread, ALCnullBackend_mixerProc, self) != althrd_success)
        return ALC_FALSE;
    return ALC_TRUE;
}
/* Signal the mixer thread to exit and join it. The exchange makes redundant
 * stop calls a no-op.
 */
static void ALCnullBackend_stop(ALCnullBackend *self)
{
    int res;
    if(ATOMIC_EXCHANGE(&self->killNow, AL_TRUE, almemory_order_acq_rel))
        return;
    althrd_join(self->thread, &res);
}
/* Factory type for the legacy null backend, using the old vtable macros. */
typedef struct ALCnullBackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCnullBackendFactory;
#define ALCNULLBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCnullBackendFactory, ALCbackendFactory) } }
ALCbackendFactory *ALCnullBackendFactory_getFactory(void);
static ALCboolean ALCnullBackendFactory_init(ALCnullBackendFactory *self);
static DECLARE_FORWARD(ALCnullBackendFactory, ALCbackendFactory, void, deinit)
static ALCboolean ALCnullBackendFactory_querySupport(ALCnullBackendFactory *self, ALCbackend_Type type);
static void ALCnullBackendFactory_probe(ALCnullBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCnullBackendFactory_createBackend(ALCnullBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCnullBackendFactory);
/* Return the shared (static) factory instance. */
ALCbackendFactory *ALCnullBackendFactory_getFactory(void)
{
    static ALCnullBackendFactory factory = ALCNULLBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &factory);
}
/* No global state to set up. */
static ALCboolean ALCnullBackendFactory_init(ALCnullBackendFactory* UNUSED(self))
{
    return ALC_TRUE;
}
/* Playback only; there is no null capture implementation. */
static ALCboolean ALCnullBackendFactory_querySupport(ALCnullBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
        return ALC_TRUE;
    return ALC_FALSE;
}
/* Report the single "No Output" device name for playback enumeration. */
static void ALCnullBackendFactory_probe(ALCnullBackendFactory* UNUSED(self), enum DevProbe type)
{
    switch(type)
    {
        case ALL_DEVICE_PROBE:
            AppendAllDevicesList(nullDevice);
            break;
        case CAPTURE_DEVICE_PROBE:
            break;
    }
}
/* Allocate a playback backend instance; returns NULL for capture requests
 * or allocation failure.
 */
static ALCbackend* ALCnullBackendFactory_createBackend(ALCnullBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
    {
        ALCnullBackend *backend;
        NEW_OBJ(backend, ALCnullBackend)(device);
        if(!backend) return NULL;
        return STATIC_CAST(ALCbackend, backend);
    }
    return NULL;
}

View file

@ -0,0 +1,179 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2010 by Chris Robinson
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/null.h"
#include <exception>
#include <atomic>
#include <chrono>
#include <cstdint>
#include <cstring>
#include <functional>
#include <thread>
#include "alcmain.h"
#include "almalloc.h"
#include "alu.h"
#include "threads.h"
namespace {
using std::chrono::seconds;
using std::chrono::milliseconds;
using std::chrono::nanoseconds;
constexpr char nullDevice[] = "No Output";
/* Null playback backend: a mixer thread renders samples in real time and
 * discards them, for use when no audio output is wanted or available.
 */
struct NullBackend final : public BackendBase {
    NullBackend(ALCdevice *device) noexcept : BackendBase{device} { }
    int mixerProc();
    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
    /* True requests the mixer thread to exit; start() clears it. */
    std::atomic<bool> mKillNow{true};
    std::thread mThread;
    DEF_NEWDEL(NullBackend)
};
/* Mixer thread body: renders (and discards) UpdateSize-sample chunks, paced
 * by the steady clock so the device advances in real time. Always returns 0.
 */
int NullBackend::mixerProc()
{
    /* Half of an update period: the sleep interval when mixing is ahead. */
    const milliseconds restTime{mDevice->UpdateSize*1000/mDevice->Frequency / 2};
    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);
    int64_t done{0};
    auto start = std::chrono::steady_clock::now();
    while(!mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        auto now = std::chrono::steady_clock::now();
        /* This converts from nanoseconds to nanosamples, then to samples. */
        int64_t avail{std::chrono::duration_cast<seconds>((now-start) * mDevice->Frequency).count()};
        if(avail-done < mDevice->UpdateSize)
        {
            std::this_thread::sleep_for(restTime);
            continue;
        }
        while(avail-done >= mDevice->UpdateSize)
        {
            /* Null buffer: samples are mixed and thrown away. */
            mDevice->renderSamples(nullptr, mDevice->UpdateSize, 0u);
            done += mDevice->UpdateSize;
        }
        /* For every completed second, increment the start time and reduce the
         * samples done. This prevents the difference between the start time
         * and current time from growing too large, while maintaining the
         * correct number of samples to render.
         */
        if(done >= mDevice->Frequency)
        {
            seconds s{done/mDevice->Frequency};
            start += s;
            done -= mDevice->Frequency*s.count();
        }
    }
    return 0;
}
/* Validate the requested device name and record it on the device. Only the
 * single "No Output" name (or no name at all) is accepted; anything else
 * raises a NoDevice backend_exception.
 */
void NullBackend::open(const char *name)
{
    if(name && strcmp(name, nullDevice) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};
    mDevice->DeviceName = name ? name : nullDevice;
}
/* Nothing device-specific to negotiate; just apply the default channel
 * ordering and accept whatever format was requested.
 */
bool NullBackend::reset()
{
    setDefaultWFXChannelOrder();
    return true;
}
/* Clear the kill flag and launch the mixer thread. Any failure from thread
 * creation is rethrown as a DeviceError backend_exception.
 */
void NullBackend::start()
{
    try {
        mKillNow.store(false, std::memory_order_release);
        mThread = std::thread{std::mem_fn(&NullBackend::mixerProc), this};
    }
    catch(std::exception& e) {
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start mixing thread: %s", e.what()};
    }
}
/* Ask the mixer thread to exit and wait for it. Safe to call when the
 * thread was never started or a stop was already requested.
 */
void NullBackend::stop()
{
    const bool alreadyKilled{mKillNow.exchange(true, std::memory_order_acq_rel)};
    if(!alreadyKilled && mThread.joinable())
        mThread.join();
}
} // namespace
/* Factory for the null backend: always available, playback-only, and
 * exposing exactly one device name.
 */
bool NullBackendFactory::init()
{
    return true;
}

bool NullBackendFactory::querySupport(BackendType type)
{
    return type == BackendType::Playback;
}

std::string NullBackendFactory::probe(BackendType type)
{
    std::string outnames;
    if(type == BackendType::Playback)
    {
        /* The appended name deliberately includes its null terminator, as
         * the result is a nul-separated name list.
         */
        outnames.append(nullDevice, sizeof(nullDevice));
    }
    return outnames;
}

BackendPtr NullBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type != BackendType::Playback)
        return nullptr;
    return BackendPtr{new NullBackend{device}};
}

BackendFactory &NullBackendFactory::getFactory()
{
    /* Function-local static: constructed once, on first use. */
    static NullBackendFactory singleton{};
    return singleton;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_NULL_H
#define BACKENDS_NULL_H
#include "backends/base.h"
/* Factory producing NullBackend instances; obtained through the
 * getFactory() singleton accessor.
 */
struct NullBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_NULL_H */

View file

@ -0,0 +1,250 @@
#include "config.h"
#include "oboe.h"
#include <cassert>
#include <cstring>
#include "alu.h"
#include "core/logging.h"
#include "oboe/Oboe.h"
namespace {
constexpr char device_name[] = "Oboe Default";
/* Playback backend on Android's Oboe library. The opened stream pulls audio
 * through the onAudioReady callback.
 */
struct OboePlayback final : public BackendBase, public oboe::AudioStreamCallback {
    OboePlayback(ALCdevice *device) : BackendBase{device} { }
    /* Owning handle; closed automatically when replaced or destroyed. */
    oboe::ManagedStream mStream;
    oboe::DataCallbackResult onAudioReady(oboe::AudioStream *oboeStream, void *audioData,
        int32_t numFrames) override;
    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
};
/* Oboe pull callback: mixes numFrames frames directly into audioData and
 * returns Continue so the stream keeps running.
 */
oboe::DataCallbackResult OboePlayback::onAudioReady(oboe::AudioStream *oboeStream, void *audioData,
    int32_t numFrames)
{
    assert(numFrames > 0);
    const int32_t numChannels{oboeStream->getChannelCount()};
    if UNLIKELY(numChannels > 2 && mDevice->FmtChans == DevFmtStereo)
    {
        /* If the device is only mixing stereo but there's more than two
         * output channels, there are unused channels that need to be silenced.
         */
        if(mStream->getFormat() == oboe::AudioFormat::Float)
            memset(audioData, 0, static_cast<uint32_t>(numFrames*numChannels)*sizeof(float));
        else
            memset(audioData, 0, static_cast<uint32_t>(numFrames*numChannels)*sizeof(int16_t));
    }
    mDevice->renderSamples(audioData, static_cast<uint32_t>(numFrames),
        static_cast<uint32_t>(numChannels));
    return oboe::DataCallbackResult::Continue;
}
void OboePlayback::open(const char *name)
{
if(!name)
name = device_name;
else if(std::strcmp(name, device_name) != 0)
throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
name};
/* Open a basic output stream, just to ensure it can work. */
oboe::Result result{oboe::AudioStreamBuilder{}.setDirection(oboe::Direction::Output)
->setPerformanceMode(oboe::PerformanceMode::LowLatency)
->openManagedStream(mStream)};
if(result != oboe::Result::OK)
throw al::backend_exception{al::backend_error::DeviceError, "Failed to create stream: %s",
oboe::convertToText(result)};
mDevice->DeviceName = name;
}
/* Negotiate the stream format with Oboe. The device's requested frequency,
 * channel count, and sample type are honored where possible; the device
 * fields are then rewritten to match whatever the opened stream actually
 * provides. Throws a DeviceError backend_exception on unrecoverable failure.
 */
bool OboePlayback::reset()
{
    oboe::AudioStreamBuilder builder;
    builder.setDirection(oboe::Direction::Output);
    builder.setPerformanceMode(oboe::PerformanceMode::LowLatency);
    /* Don't let Oboe convert. We should be able to handle anything it gives
     * back.
     */
    builder.setSampleRateConversionQuality(oboe::SampleRateConversionQuality::None);
    builder.setChannelConversionAllowed(false);
    builder.setFormatConversionAllowed(false);
    builder.setCallback(this);
    if(mDevice->Flags.test(FrequencyRequest))
        builder.setSampleRate(static_cast<int32_t>(mDevice->Frequency));
    if(mDevice->Flags.test(ChannelsRequest))
    {
        /* Only use mono or stereo at user request. There's no telling what
         * other counts may be inferred as.
         */
        builder.setChannelCount((mDevice->FmtChans==DevFmtMono) ? oboe::ChannelCount::Mono
            : (mDevice->FmtChans==DevFmtStereo) ? oboe::ChannelCount::Stereo
            : oboe::ChannelCount::Unspecified);
    }
    if(mDevice->Flags.test(SampleTypeRequest))
    {
        /* Oboe streams are 16-bit int or float; map the requested type to
         * the nearest of those two.
         */
        oboe::AudioFormat format{oboe::AudioFormat::Unspecified};
        switch(mDevice->FmtType)
        {
        case DevFmtByte:
        case DevFmtUByte:
        case DevFmtShort:
        case DevFmtUShort:
            format = oboe::AudioFormat::I16;
            break;
        case DevFmtInt:
        case DevFmtUInt:
        case DevFmtFloat:
            format = oboe::AudioFormat::Float;
            break;
        }
        builder.setFormat(format);
    }
    oboe::Result result{builder.openManagedStream(mStream)};
    /* If the format failed, try asking for the defaults. Relax one request
     * at a time until the open succeeds or nothing is left to relax.
     */
    while(result == oboe::Result::ErrorInvalidFormat)
    {
        if(builder.getFormat() != oboe::AudioFormat::Unspecified)
            builder.setFormat(oboe::AudioFormat::Unspecified);
        else if(builder.getSampleRate() != oboe::kUnspecified)
            builder.setSampleRate(oboe::kUnspecified);
        else if(builder.getChannelCount() != oboe::ChannelCount::Unspecified)
            builder.setChannelCount(oboe::ChannelCount::Unspecified);
        else
            break;
        result = builder.openManagedStream(mStream);
    }
    if(result != oboe::Result::OK)
        throw al::backend_exception{al::backend_error::DeviceError, "Failed to create stream: %s",
            oboe::convertToText(result)};
    TRACE("Got stream with properties:\n%s", oboe::convertToText(mStream.get()));
    /* Propagate the actual stream configuration back to the device. */
    switch(mStream->getChannelCount())
    {
    case oboe::ChannelCount::Mono:
        mDevice->FmtChans = DevFmtMono;
        break;
    case oboe::ChannelCount::Stereo:
        mDevice->FmtChans = DevFmtStereo;
        break;
    /* Other potential configurations. Could be wrong, but better than failing.
     * Assume WFX channel order.
     */
    case 4:
        mDevice->FmtChans = DevFmtQuad;
        break;
    case 6:
        mDevice->FmtChans = DevFmtX51Rear;
        break;
    case 7:
        mDevice->FmtChans = DevFmtX61;
        break;
    case 8:
        mDevice->FmtChans = DevFmtX71;
        break;
    default:
        if(mStream->getChannelCount() < 1)
            throw al::backend_exception{al::backend_error::DeviceError,
                "Got unhandled channel count: %d", mStream->getChannelCount()};
        /* Assume first two channels are front left/right. We can do a stereo
         * mix and keep the other channels silent.
         */
        mDevice->FmtChans = DevFmtStereo;
        break;
    }
    setDefaultWFXChannelOrder();
    switch(mStream->getFormat())
    {
    case oboe::AudioFormat::I16:
        mDevice->FmtType = DevFmtShort;
        break;
    case oboe::AudioFormat::Float:
        mDevice->FmtType = DevFmtFloat;
        break;
    case oboe::AudioFormat::Unspecified:
    case oboe::AudioFormat::Invalid:
        throw al::backend_exception{al::backend_error::DeviceError,
            "Got unhandled sample type: %s", oboe::convertToText(mStream->getFormat())};
    }
    mDevice->Frequency = static_cast<uint32_t>(mStream->getSampleRate());
    /* Ensure the period size is no less than 10ms. It's possible for FramesPerCallback to be 0
     * indicating variable updates, but OpenAL should have a reasonable minimum update size set.
     * FramesPerBurst may not necessarily be correct, but hopefully it can act as a minimum
     * update size.
     */
    mDevice->UpdateSize = maxu(mDevice->Frequency / 100,
        static_cast<uint32_t>(mStream->getFramesPerBurst()));
    mDevice->BufferSize = maxu(mDevice->UpdateSize * 2,
        static_cast<uint32_t>(mStream->getBufferSizeInFrames()));
    return true;
}
void OboePlayback::start()
{
const oboe::Result result{mStream->start()};
if(result != oboe::Result::OK)
throw al::backend_exception{al::backend_error::DeviceError, "Failed to start stream: %s",
oboe::convertToText(result)};
}
void OboePlayback::stop()
{
oboe::Result result{mStream->stop()};
if(result != oboe::Result::OK)
throw al::backend_exception{al::backend_error::DeviceError, "Failed to stop stream: %s",
oboe::convertToText(result)};
}
} // namespace
/* Factory for the Oboe backend: always available, playback-only, and
 * exposing a single device name.
 */
bool OboeBackendFactory::init()
{
    return true;
}

bool OboeBackendFactory::querySupport(BackendType type)
{
    return type == BackendType::Playback;
}

std::string OboeBackendFactory::probe(BackendType type)
{
    if(type == BackendType::Playback)
    {
        /* Includes null char, as the result is a nul-separated name list. */
        return std::string{device_name, sizeof(device_name)};
    }
    return std::string{};
}

BackendPtr OboeBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type != BackendType::Playback)
        return nullptr;
    return BackendPtr{new OboePlayback{device}};
}

BackendFactory &OboeBackendFactory::getFactory()
{
    /* Function-local static: constructed once, on first use. */
    static OboeBackendFactory singleton{};
    return singleton;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_OBOE_H
#define BACKENDS_OBOE_H
#include "backends/base.h"
/* Factory producing OboePlayback instances; obtained through the
 * getFactory() singleton accessor.
 */
struct OboeBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_OBOE_H */

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,975 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* This is an OpenAL backend for Android using the native audio APIs based on
* OpenSL ES 1.0.1. It is based on source code for the native-audio sample app
* bundled with NDK.
*/
#include "config.h"
#include "backends/opensl.h"
#include <stdlib.h>
#include <jni.h>
#include <new>
#include <array>
#include <cstring>
#include <thread>
#include <functional>
#include "albit.h"
#include "alcmain.h"
#include "alu.h"
#include "compat.h"
#include "core/logging.h"
#include "opthelpers.h"
#include "ringbuffer.h"
#include "threads.h"
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>
namespace {
/* Helper macros: VCALL/VCALL0 invoke an OpenSL ES COM-style interface method
 * through the object's vtable, passing the object itself as the first
 * argument. VCALL0 is for methods that take no further arguments.
 */
#define EXTRACT_VCALL_ARGS(...) __VA_ARGS__))
#define VCALL(obj, func) ((*(obj))->func((obj), EXTRACT_VCALL_ARGS
#define VCALL0(obj, func) ((*(obj))->func((obj) EXTRACT_VCALL_ARGS
/* The only device name this backend reports or accepts. */
constexpr char opensl_device[] = "OpenSL";
/* Map an OpenAL device channel configuration to the equivalent OpenSL ES
 * speaker mask. Returns 0 for configurations with no SL mapping (ambisonic).
 */
constexpr SLuint32 GetChannelMask(DevFmtChannels chans) noexcept
{
    switch(chans)
    {
    case DevFmtMono: return SL_SPEAKER_FRONT_CENTER;
    case DevFmtStereo: return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
    case DevFmtQuad: return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT |
        SL_SPEAKER_BACK_LEFT | SL_SPEAKER_BACK_RIGHT;
    case DevFmtX51: return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT |
        SL_SPEAKER_FRONT_CENTER | SL_SPEAKER_LOW_FREQUENCY | SL_SPEAKER_SIDE_LEFT |
        SL_SPEAKER_SIDE_RIGHT;
    case DevFmtX51Rear: return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT |
        SL_SPEAKER_FRONT_CENTER | SL_SPEAKER_LOW_FREQUENCY | SL_SPEAKER_BACK_LEFT |
        SL_SPEAKER_BACK_RIGHT;
    case DevFmtX61: return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT |
        SL_SPEAKER_FRONT_CENTER | SL_SPEAKER_LOW_FREQUENCY | SL_SPEAKER_BACK_CENTER |
        SL_SPEAKER_SIDE_LEFT | SL_SPEAKER_SIDE_RIGHT;
    case DevFmtX71: return SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT |
        SL_SPEAKER_FRONT_CENTER | SL_SPEAKER_LOW_FREQUENCY | SL_SPEAKER_BACK_LEFT |
        SL_SPEAKER_BACK_RIGHT | SL_SPEAKER_SIDE_LEFT | SL_SPEAKER_SIDE_RIGHT;
    case DevFmtAmbi3D:
        break;
    }
    return 0;
}
#ifdef SL_ANDROID_DATAFORMAT_PCM_EX
/* Map an OpenAL sample type to the Android PCM_EX representation flag.
 * Only compiled when the extended PCM format is available in the NDK.
 */
constexpr SLuint32 GetTypeRepresentation(DevFmtType type) noexcept
{
    switch(type)
    {
    case DevFmtUByte:
    case DevFmtUShort:
    case DevFmtUInt:
        return SL_ANDROID_PCM_REPRESENTATION_UNSIGNED_INT;
    case DevFmtByte:
    case DevFmtShort:
    case DevFmtInt:
        return SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
    case DevFmtFloat:
        return SL_ANDROID_PCM_REPRESENTATION_FLOAT;
    }
    return 0;
}
#endif
/* Select the OpenSL byte-order flag matching the host's native endianness,
 * evaluated at compile time.
 */
constexpr SLuint32 GetByteOrderEndianness() noexcept
{
    return (al::endian::native == al::endian::little) ? SL_BYTEORDER_LITTLEENDIAN
        : SL_BYTEORDER_BIGENDIAN;
}
/* Translate an SLresult code to a human-readable string for log output.
 * Codes not defined by the available headers fall through to a generic
 * message.
 */
const char *res_str(SLresult result) noexcept
{
    switch(result)
    {
    case SL_RESULT_SUCCESS: return "Success";
    case SL_RESULT_PRECONDITIONS_VIOLATED: return "Preconditions violated";
    case SL_RESULT_PARAMETER_INVALID: return "Parameter invalid";
    case SL_RESULT_MEMORY_FAILURE: return "Memory failure";
    case SL_RESULT_RESOURCE_ERROR: return "Resource error";
    case SL_RESULT_RESOURCE_LOST: return "Resource lost";
    case SL_RESULT_IO_ERROR: return "I/O error";
    case SL_RESULT_BUFFER_INSUFFICIENT: return "Buffer insufficient";
    case SL_RESULT_CONTENT_CORRUPTED: return "Content corrupted";
    case SL_RESULT_CONTENT_UNSUPPORTED: return "Content unsupported";
    case SL_RESULT_CONTENT_NOT_FOUND: return "Content not found";
    case SL_RESULT_PERMISSION_DENIED: return "Permission denied";
    case SL_RESULT_FEATURE_UNSUPPORTED: return "Feature unsupported";
    case SL_RESULT_INTERNAL_ERROR: return "Internal error";
    case SL_RESULT_UNKNOWN_ERROR: return "Unknown error";
    case SL_RESULT_OPERATION_ABORTED: return "Operation aborted";
    case SL_RESULT_CONTROL_LOST: return "Control lost";
#ifdef SL_RESULT_READONLY
    case SL_RESULT_READONLY: return "ReadOnly";
#endif
#ifdef SL_RESULT_ENGINEOPTION_UNSUPPORTED
    case SL_RESULT_ENGINEOPTION_UNSUPPORTED: return "Engine option unsupported";
#endif
#ifdef SL_RESULT_SOURCE_SINK_INCOMPATIBLE
    case SL_RESULT_SOURCE_SINK_INCOMPATIBLE: return "Source/Sink incompatible";
#endif
    }
    return "Unknown error code";
}
/* Log an error line (with context string s) when the SL result x indicates
 * failure; a no-op on success. Expands to a single statement.
 */
#define PRINTERR(x, s) do { \
    if UNLIKELY((x) != SL_RESULT_SUCCESS) \
        ERR("%s: %s\n", (s), res_str((x))); \
} while(0)
/* Playback backend over OpenSL ES. A mixer thread fills a ring buffer whose
 * chunks are enqueued on an Android simple buffer queue; the queue's
 * completion callback releases chunks and wakes the mixer via a semaphore.
 */
struct OpenSLPlayback final : public BackendBase {
    OpenSLPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~OpenSLPlayback() override;
    /* Buffer-queue completion handler (instance side). */
    void process(SLAndroidSimpleBufferQueueItf bq) noexcept;
    /* C-style adapter forwarding to process(); presumably registered as the
     * buffer-queue callback — registration happens outside this view.
     */
    static void processC(SLAndroidSimpleBufferQueueItf bq, void *context) noexcept
    { static_cast<OpenSLPlayback*>(context)->process(bq); }
    int mixerProc();
    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
    ClockLatency getClockLatency() override;
    /* engine interfaces */
    SLObjectItf mEngineObj{nullptr};
    SLEngineItf mEngine{nullptr};
    /* output mix interfaces */
    SLObjectItf mOutputMix{nullptr};
    /* buffer queue player interfaces */
    SLObjectItf mBufferQueueObj{nullptr};
    /* Holds mixed audio that is queued and waiting to play. */
    RingBufferPtr mRing{nullptr};
    /* Posted by process() to wake the mixer when queue space frees up. */
    al::semaphore mSem;
    /* Held by the mixer thread while rendering into the ring buffer. */
    std::mutex mMutex;
    uint mFrameSize{0};
    /* True requests the mixer thread to exit. */
    std::atomic<bool> mKillNow{true};
    std::thread mThread;
    DEF_NEWDEL(OpenSLPlayback)
};
/* Destroy the player, output mix, and engine objects (in that order), if
 * they were created.
 */
OpenSLPlayback::~OpenSLPlayback()
{
    if(mBufferQueueObj)
        VCALL0(mBufferQueueObj,Destroy)();
    mBufferQueueObj = nullptr;
    if(mOutputMix)
        VCALL0(mOutputMix,Destroy)();
    mOutputMix = nullptr;
    if(mEngineObj)
        VCALL0(mEngineObj,Destroy)();
    mEngineObj = nullptr;
    mEngine = nullptr;
}
/* Callback invoked by OpenSL each time a queued buffer finishes playing. */
void OpenSLPlayback::process(SLAndroidSimpleBufferQueueItf) noexcept
{
    /* A note on the ringbuffer usage: The buffer queue seems to hold on to the
     * pointer passed to the Enqueue method, rather than copying the audio.
     * Consequently, the ringbuffer contains the audio that is currently queued
     * and waiting to play. This process() callback is called when a buffer is
     * finished, so we simply move the read pointer up to indicate the space is
     * available for writing again, and wake up the mixer thread to mix and
     * queue more audio.
     */
    mRing->readAdvance(1);
    mSem.post();
}
/* Mixer thread: keeps the OpenSL buffer queue fed. Audio is mixed into the
 * ring buffer, then each ring chunk is enqueued on the player; process()
 * releases chunks as they finish. Exits on kill request, disconnect, or an
 * unrecoverable SL error. Always returns 0.
 */
int OpenSLPlayback::mixerProc()
{
    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);
    SLPlayItf player;
    SLAndroidSimpleBufferQueueItf bufferQueue;
    SLresult result{VCALL(mBufferQueueObj,GetInterface)(SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
        &bufferQueue)};
    PRINTERR(result, "bufferQueue->GetInterface SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mBufferQueueObj,GetInterface)(SL_IID_PLAY, &player);
        PRINTERR(result, "bufferQueue->GetInterface SL_IID_PLAY");
    }
    const size_t frame_step{mDevice->channelsFromFmt()};
    if(SL_RESULT_SUCCESS != result)
        mDevice->handleDisconnect("Failed to get playback buffer: 0x%08x", result);
    while(SL_RESULT_SUCCESS == result && !mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        if(mRing->writeSpace() == 0)
        {
            /* Queue is full: make sure playback is actually running, then
             * wait for process() to free a buffer.
             */
            SLuint32 state{0};
            result = VCALL(player,GetPlayState)(&state);
            PRINTERR(result, "player->GetPlayState");
            if(SL_RESULT_SUCCESS == result && state != SL_PLAYSTATE_PLAYING)
            {
                result = VCALL(player,SetPlayState)(SL_PLAYSTATE_PLAYING);
                PRINTERR(result, "player->SetPlayState");
            }
            if(SL_RESULT_SUCCESS != result)
            {
                mDevice->handleDisconnect("Failed to start playback: 0x%08x", result);
                break;
            }
            if(mRing->writeSpace() == 0)
            {
                mSem.wait();
                continue;
            }
        }
        /* Mix into all currently-writable ring space (two segments when the
         * write region wraps), under the mixer lock.
         */
        std::unique_lock<std::mutex> dlock{mMutex};
        auto data = mRing->getWriteVector();
        mDevice->renderSamples(data.first.buf,
            static_cast<uint>(data.first.len)*mDevice->UpdateSize, frame_step);
        if(data.second.len > 0)
            mDevice->renderSamples(data.second.buf,
                static_cast<uint>(data.second.len)*mDevice->UpdateSize, frame_step);
        size_t todo{data.first.len + data.second.len};
        mRing->writeAdvance(todo);
        dlock.unlock();
        /* Enqueue each newly-mixed update on the SL buffer queue. */
        for(size_t i{0};i < todo;i++)
        {
            if(!data.first.len)
            {
                data.first = data.second;
                data.second.buf = nullptr;
                data.second.len = 0;
            }
            result = VCALL(bufferQueue,Enqueue)(data.first.buf, mDevice->UpdateSize*mFrameSize);
            PRINTERR(result, "bufferQueue->Enqueue");
            if(SL_RESULT_SUCCESS != result)
            {
                mDevice->handleDisconnect("Failed to queue audio: 0x%08x", result);
                break;
            }
            data.first.len--;
            data.first.buf += mDevice->UpdateSize*mFrameSize;
        }
    }
    return 0;
}
/* Validate the device name, then create and realize the SL engine and output
 * mix. On any SL failure the partially-created objects are destroyed and a
 * DeviceError backend_exception is thrown.
 */
void OpenSLPlayback::open(const char *name)
{
    if(!name)
        name = opensl_device;
    else if(strcmp(name, opensl_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};
    // create engine
    SLresult result{slCreateEngine(&mEngineObj, 0, nullptr, 0, nullptr, nullptr)};
    PRINTERR(result, "slCreateEngine");
    if(SL_RESULT_SUCCESS == result)
    {
        /* Synchronous realization (SL_BOOLEAN_FALSE = not async). */
        result = VCALL(mEngineObj,Realize)(SL_BOOLEAN_FALSE);
        PRINTERR(result, "engine->Realize");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mEngineObj,GetInterface)(SL_IID_ENGINE, &mEngine);
        PRINTERR(result, "engine->GetInterface");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mEngine,CreateOutputMix)(&mOutputMix, 0, nullptr, nullptr);
        PRINTERR(result, "engine->CreateOutputMix");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mOutputMix,Realize)(SL_BOOLEAN_FALSE);
        PRINTERR(result, "outputMix->Realize");
    }
    if(SL_RESULT_SUCCESS != result)
    {
        /* Unwind whatever was created before the failure. */
        if(mOutputMix)
            VCALL0(mOutputMix,Destroy)();
        mOutputMix = nullptr;
        if(mEngineObj)
            VCALL0(mEngineObj,Destroy)();
        mEngineObj = nullptr;
        mEngine = nullptr;
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to initialize OpenSL device: 0x%08x", result};
    }
    mDevice->DeviceName = name;
}
bool OpenSLPlayback::reset()
{
SLresult result;
if(mBufferQueueObj)
VCALL0(mBufferQueueObj,Destroy)();
mBufferQueueObj = nullptr;
mRing = nullptr;
#if 0
if(!mDevice->Flags.get<FrequencyRequest>())
{
/* FIXME: Disabled until I figure out how to get the Context needed for
* the getSystemService call.
*/
JNIEnv *env = Android_GetJNIEnv();
jobject jctx = Android_GetContext();
/* Get necessary stuff for using java.lang.Integer,
* android.content.Context, and android.media.AudioManager.
*/
jclass int_cls = JCALL(env,FindClass)("java/lang/Integer");
jmethodID int_parseint = JCALL(env,GetStaticMethodID)(int_cls,
"parseInt", "(Ljava/lang/String;)I"
);
TRACE("Integer: %p, parseInt: %p\n", int_cls, int_parseint);
jclass ctx_cls = JCALL(env,FindClass)("android/content/Context");
jfieldID ctx_audsvc = JCALL(env,GetStaticFieldID)(ctx_cls,
"AUDIO_SERVICE", "Ljava/lang/String;"
);
jmethodID ctx_getSysSvc = JCALL(env,GetMethodID)(ctx_cls,
"getSystemService", "(Ljava/lang/String;)Ljava/lang/Object;"
);
TRACE("Context: %p, AUDIO_SERVICE: %p, getSystemService: %p\n",
ctx_cls, ctx_audsvc, ctx_getSysSvc);
jclass audmgr_cls = JCALL(env,FindClass)("android/media/AudioManager");
jfieldID audmgr_prop_out_srate = JCALL(env,GetStaticFieldID)(audmgr_cls,
"PROPERTY_OUTPUT_SAMPLE_RATE", "Ljava/lang/String;"
);
jmethodID audmgr_getproperty = JCALL(env,GetMethodID)(audmgr_cls,
"getProperty", "(Ljava/lang/String;)Ljava/lang/String;"
);
TRACE("AudioManager: %p, PROPERTY_OUTPUT_SAMPLE_RATE: %p, getProperty: %p\n",
audmgr_cls, audmgr_prop_out_srate, audmgr_getproperty);
const char *strchars;
jstring strobj;
/* Now make the calls. */
//AudioManager audMgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
strobj = JCALL(env,GetStaticObjectField)(ctx_cls, ctx_audsvc);
jobject audMgr = JCALL(env,CallObjectMethod)(jctx, ctx_getSysSvc, strobj);
strchars = JCALL(env,GetStringUTFChars)(strobj, nullptr);
TRACE("Context.getSystemService(%s) = %p\n", strchars, audMgr);
JCALL(env,ReleaseStringUTFChars)(strobj, strchars);
//String srateStr = audMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
strobj = JCALL(env,GetStaticObjectField)(audmgr_cls, audmgr_prop_out_srate);
jstring srateStr = JCALL(env,CallObjectMethod)(audMgr, audmgr_getproperty, strobj);
strchars = JCALL(env,GetStringUTFChars)(strobj, nullptr);
TRACE("audMgr.getProperty(%s) = %p\n", strchars, srateStr);
JCALL(env,ReleaseStringUTFChars)(strobj, strchars);
//int sampleRate = Integer.parseInt(srateStr);
sampleRate = JCALL(env,CallStaticIntMethod)(int_cls, int_parseint, srateStr);
strchars = JCALL(env,GetStringUTFChars)(srateStr, nullptr);
TRACE("Got system sample rate %uhz (%s)\n", sampleRate, strchars);
JCALL(env,ReleaseStringUTFChars)(srateStr, strchars);
if(!sampleRate) sampleRate = device->Frequency;
else sampleRate = maxu(sampleRate, MIN_OUTPUT_RATE);
}
#endif
mDevice->FmtChans = DevFmtStereo;
mDevice->FmtType = DevFmtShort;
setDefaultWFXChannelOrder();
mFrameSize = mDevice->frameSizeFromFmt();
const std::array<SLInterfaceID,2> ids{{ SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION }};
const std::array<SLboolean,2> reqs{{ SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE }};
SLDataLocator_OutputMix loc_outmix{};
loc_outmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
loc_outmix.outputMix = mOutputMix;
SLDataSink audioSnk{};
audioSnk.pLocator = &loc_outmix;
audioSnk.pFormat = nullptr;
SLDataLocator_AndroidSimpleBufferQueue loc_bufq{};
loc_bufq.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
loc_bufq.numBuffers = mDevice->BufferSize / mDevice->UpdateSize;
SLDataSource audioSrc{};
#ifdef SL_ANDROID_DATAFORMAT_PCM_EX
SLAndroidDataFormat_PCM_EX format_pcm_ex{};
format_pcm_ex.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
format_pcm_ex.numChannels = mDevice->channelsFromFmt();
format_pcm_ex.sampleRate = mDevice->Frequency * 1000;
format_pcm_ex.bitsPerSample = mDevice->bytesFromFmt() * 8;
format_pcm_ex.containerSize = format_pcm_ex.bitsPerSample;
format_pcm_ex.channelMask = GetChannelMask(mDevice->FmtChans);
format_pcm_ex.endianness = GetByteOrderEndianness();
format_pcm_ex.representation = GetTypeRepresentation(mDevice->FmtType);
audioSrc.pLocator = &loc_bufq;
audioSrc.pFormat = &format_pcm_ex;
result = VCALL(mEngine,CreateAudioPlayer)(&mBufferQueueObj, &audioSrc, &audioSnk, ids.size(),
ids.data(), reqs.data());
if(SL_RESULT_SUCCESS != result)
#endif
{
/* Alter sample type according to what SLDataFormat_PCM can support. */
switch(mDevice->FmtType)
{
case DevFmtByte: mDevice->FmtType = DevFmtUByte; break;
case DevFmtUInt: mDevice->FmtType = DevFmtInt; break;
case DevFmtFloat:
case DevFmtUShort: mDevice->FmtType = DevFmtShort; break;
case DevFmtUByte:
case DevFmtShort:
case DevFmtInt:
break;
}
SLDataFormat_PCM format_pcm{};
format_pcm.formatType = SL_DATAFORMAT_PCM;
format_pcm.numChannels = mDevice->channelsFromFmt();
format_pcm.samplesPerSec = mDevice->Frequency * 1000;
format_pcm.bitsPerSample = mDevice->bytesFromFmt() * 8;
format_pcm.containerSize = format_pcm.bitsPerSample;
format_pcm.channelMask = GetChannelMask(mDevice->FmtChans);
format_pcm.endianness = GetByteOrderEndianness();
audioSrc.pLocator = &loc_bufq;
audioSrc.pFormat = &format_pcm;
result = VCALL(mEngine,CreateAudioPlayer)(&mBufferQueueObj, &audioSrc, &audioSnk, ids.size(),
ids.data(), reqs.data());
PRINTERR(result, "engine->CreateAudioPlayer");
}
if(SL_RESULT_SUCCESS == result)
{
/* Set the stream type to "media" (games, music, etc), if possible. */
SLAndroidConfigurationItf config;
result = VCALL(mBufferQueueObj,GetInterface)(SL_IID_ANDROIDCONFIGURATION, &config);
PRINTERR(result, "bufferQueue->GetInterface SL_IID_ANDROIDCONFIGURATION");
if(SL_RESULT_SUCCESS == result)
{
SLint32 streamType = SL_ANDROID_STREAM_MEDIA;
result = VCALL(config,SetConfiguration)(SL_ANDROID_KEY_STREAM_TYPE, &streamType,
sizeof(streamType));
PRINTERR(result, "config->SetConfiguration");
}
/* Clear any error since this was optional. */
result = SL_RESULT_SUCCESS;
}
if(SL_RESULT_SUCCESS == result)
{
result = VCALL(mBufferQueueObj,Realize)(SL_BOOLEAN_FALSE);
PRINTERR(result, "bufferQueue->Realize");
}
if(SL_RESULT_SUCCESS == result)
{
const uint num_updates{mDevice->BufferSize / mDevice->UpdateSize};
mRing = RingBuffer::Create(num_updates, mFrameSize*mDevice->UpdateSize, true);
}
if(SL_RESULT_SUCCESS != result)
{
if(mBufferQueueObj)
VCALL0(mBufferQueueObj,Destroy)();
mBufferQueueObj = nullptr;
return false;
}
return true;
}
void OpenSLPlayback::start()
{
mRing->reset();
SLAndroidSimpleBufferQueueItf bufferQueue;
SLresult result{VCALL(mBufferQueueObj,GetInterface)(SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
&bufferQueue)};
PRINTERR(result, "bufferQueue->GetInterface");
if(SL_RESULT_SUCCESS == result)
{
result = VCALL(bufferQueue,RegisterCallback)(&OpenSLPlayback::processC, this);
PRINTERR(result, "bufferQueue->RegisterCallback");
}
if(SL_RESULT_SUCCESS != result)
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to register callback: 0x%08x", result};
try {
mKillNow.store(false, std::memory_order_release);
mThread = std::thread(std::mem_fn(&OpenSLPlayback::mixerProc), this);
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start mixing thread: %s", e.what()};
}
}
/** Stop playback: join the mixer thread, stop the player, and drain the
 * buffer queue so the device object is left idle and empty.
 */
void OpenSLPlayback::stop()
{
    /* Already stopped (or never started)? Nothing to do. */
    if(mKillNow.exchange(true, std::memory_order_acq_rel) || !mThread.joinable())
        return;

    /* Wake the mixer thread if it's waiting on the semaphore, then join it. */
    mSem.post();
    mThread.join();

    /* Put the player into the stopped state. */
    SLPlayItf player;
    SLresult result{VCALL(mBufferQueueObj,GetInterface)(SL_IID_PLAY, &player)};
    PRINTERR(result, "bufferQueue->GetInterface");
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(player,SetPlayState)(SL_PLAYSTATE_STOPPED);
        PRINTERR(result, "player->SetPlayState");
    }

    /* Clear any still-queued buffers and detach our callback. */
    SLAndroidSimpleBufferQueueItf bufferQueue;
    result = VCALL(mBufferQueueObj,GetInterface)(SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &bufferQueue);
    PRINTERR(result, "bufferQueue->GetInterface");
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL0(bufferQueue,Clear)();
        PRINTERR(result, "bufferQueue->Clear");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(bufferQueue,RegisterCallback)(nullptr, nullptr);
        PRINTERR(result, "bufferQueue->RegisterCallback");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        /* Spin (with yields) until the queue reports no buffers in flight. */
        SLAndroidSimpleBufferQueueState state;
        do {
            std::this_thread::yield();
            result = VCALL(bufferQueue,GetState)(&state);
        } while(SL_RESULT_SUCCESS == result && state.count > 0);
        PRINTERR(result, "bufferQueue->GetState");
    }
}
/** Report the device clock time together with the current output latency,
 * derived from the number of pending ring-buffer chunks.
 */
ClockLatency OpenSLPlayback::getClockLatency()
{
    std::lock_guard<std::mutex> _{mMutex};

    ClockLatency ret;
    ret.ClockTime = GetDeviceClockTime(mDevice);
    /* Pending samples / sample rate = latency in seconds. */
    const auto pending = mRing->readSpace() * mDevice->UpdateSize;
    ret.Latency = std::chrono::seconds{pending};
    ret.Latency /= mDevice->Frequency;
    return ret;
}
/** OpenSL ES capture backend: records into a chunked ring buffer via an
 * Android simple buffer queue.
 */
struct OpenSLCapture final : public BackendBase {
    OpenSLCapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~OpenSLCapture() override;

    /* Buffer-queue callback, invoked when a capture chunk completes. */
    void process(SLAndroidSimpleBufferQueueItf bq) noexcept;
    static void processC(SLAndroidSimpleBufferQueueItf bq, void *context) noexcept
    { static_cast<OpenSLCapture*>(context)->process(bq); }

    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;

    /* engine interfaces */
    SLObjectItf mEngineObj{nullptr};
    SLEngineItf mEngine;

    /* recording interfaces */
    SLObjectItf mRecordObj{nullptr};

    /* Ring buffer of UpdateSize-sample chunks filled by the device. */
    RingBufferPtr mRing{nullptr};
    /* Sample offset into the current, partially-consumed chunk. */
    uint mSplOffset{0u};

    /* Bytes per sample frame, cached from the device format. */
    uint mFrameSize{0};

    DEF_NEWDEL(OpenSLCapture)
};
OpenSLCapture::~OpenSLCapture()
{
    /* Destroy the recorder object before the engine that created it. */
    if(mRecordObj)
        VCALL0(mRecordObj,Destroy)();
    mRecordObj = nullptr;

    if(mEngineObj)
        VCALL0(mEngineObj,Destroy)();
    mEngineObj = nullptr;
    mEngine = nullptr;
}
/** Buffer-queue callback: one chunk was recorded into the ring buffer. */
void OpenSLCapture::process(SLAndroidSimpleBufferQueueItf) noexcept
{
    /* A new chunk has been written into the ring buffer, advance it. */
    mRing->writeAdvance(1);
}
/** Open the (single) OpenSL capture device: create the engine and recorder,
 * size the ring buffer, register the chunk callback, and pre-queue every
 * ring-buffer chunk for recording. Throws al::backend_exception on failure.
 */
void OpenSLCapture::open(const char* name)
{
    /* Only one device name is supported. */
    if(!name)
        name = opensl_device;
    else if(strcmp(name, opensl_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};

    /* Create and realize the engine; each step only runs if the previous one
     * succeeded (result is chained through the whole function).
     */
    SLresult result{slCreateEngine(&mEngineObj, 0, nullptr, 0, nullptr, nullptr)};
    PRINTERR(result, "slCreateEngine");
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mEngineObj,Realize)(SL_BOOLEAN_FALSE);
        PRINTERR(result, "engine->Realize");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mEngineObj,GetInterface)(SL_IID_ENGINE, &mEngine);
        PRINTERR(result, "engine->GetInterface");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        mFrameSize = mDevice->frameSizeFromFmt();
        /* Ensure the total length is at least 100ms */
        uint length{maxu(mDevice->BufferSize, mDevice->Frequency/10)};
        /* Ensure the per-chunk length is at least 10ms, and no more than 50ms. */
        uint update_len{clampu(mDevice->BufferSize/3, mDevice->Frequency/100,
            mDevice->Frequency/100*5)};
        uint num_updates{(length+update_len-1) / update_len};

        mRing = RingBuffer::Create(num_updates, update_len*mFrameSize, false);

        /* Publish the actual chunking back to the device. */
        mDevice->UpdateSize = update_len;
        mDevice->BufferSize = static_cast<uint>(mRing->writeSpace() * update_len);
    }
    if(SL_RESULT_SUCCESS == result)
    {
        /* Buffer queue is required; Android configuration is optional. */
        const std::array<SLInterfaceID,2> ids{{ SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION }};
        const std::array<SLboolean,2> reqs{{ SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE }};

        SLDataLocator_IODevice loc_dev{};
        loc_dev.locatorType = SL_DATALOCATOR_IODEVICE;
        loc_dev.deviceType = SL_IODEVICE_AUDIOINPUT;
        loc_dev.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
        loc_dev.device = nullptr;

        SLDataSource audioSrc{};
        audioSrc.pLocator = &loc_dev;
        audioSrc.pFormat = nullptr;

        SLDataLocator_AndroidSimpleBufferQueue loc_bq{};
        loc_bq.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
        loc_bq.numBuffers = mDevice->BufferSize / mDevice->UpdateSize;

        SLDataSink audioSnk{};
#ifdef SL_ANDROID_DATAFORMAT_PCM_EX
        /* Prefer the extended PCM format (supports more sample types). Note
         * that OpenSL sample rates are in milliHertz, hence the *1000.
         */
        SLAndroidDataFormat_PCM_EX format_pcm_ex{};
        format_pcm_ex.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
        format_pcm_ex.numChannels = mDevice->channelsFromFmt();
        format_pcm_ex.sampleRate = mDevice->Frequency * 1000;
        format_pcm_ex.bitsPerSample = mDevice->bytesFromFmt() * 8;
        format_pcm_ex.containerSize = format_pcm_ex.bitsPerSample;
        format_pcm_ex.channelMask = GetChannelMask(mDevice->FmtChans);
        format_pcm_ex.endianness = GetByteOrderEndianness();
        format_pcm_ex.representation = GetTypeRepresentation(mDevice->FmtType);

        audioSnk.pLocator = &loc_bq;
        audioSnk.pFormat = &format_pcm_ex;
        result = VCALL(mEngine,CreateAudioRecorder)(&mRecordObj, &audioSrc, &audioSnk,
            ids.size(), ids.data(), reqs.data());
        if(SL_RESULT_SUCCESS != result)
#endif
        {
            /* Fallback to SLDataFormat_PCM only if it supports the desired
             * sample type.
             */
            if(mDevice->FmtType == DevFmtUByte || mDevice->FmtType == DevFmtShort
                || mDevice->FmtType == DevFmtInt)
            {
                SLDataFormat_PCM format_pcm{};
                format_pcm.formatType = SL_DATAFORMAT_PCM;
                format_pcm.numChannels = mDevice->channelsFromFmt();
                format_pcm.samplesPerSec = mDevice->Frequency * 1000;
                format_pcm.bitsPerSample = mDevice->bytesFromFmt() * 8;
                format_pcm.containerSize = format_pcm.bitsPerSample;
                format_pcm.channelMask = GetChannelMask(mDevice->FmtChans);
                format_pcm.endianness = GetByteOrderEndianness();

                audioSnk.pLocator = &loc_bq;
                audioSnk.pFormat = &format_pcm;
                result = VCALL(mEngine,CreateAudioRecorder)(&mRecordObj, &audioSrc, &audioSnk,
                    ids.size(), ids.data(), reqs.data());
            }
            PRINTERR(result, "engine->CreateAudioRecorder");
        }
    }
    if(SL_RESULT_SUCCESS == result)
    {
        /* Set the record preset to "generic", if possible. */
        SLAndroidConfigurationItf config;
        result = VCALL(mRecordObj,GetInterface)(SL_IID_ANDROIDCONFIGURATION, &config);
        PRINTERR(result, "recordObj->GetInterface SL_IID_ANDROIDCONFIGURATION");
        if(SL_RESULT_SUCCESS == result)
        {
            SLuint32 preset = SL_ANDROID_RECORDING_PRESET_GENERIC;
            result = VCALL(config,SetConfiguration)(SL_ANDROID_KEY_RECORDING_PRESET, &preset,
                sizeof(preset));
            PRINTERR(result, "config->SetConfiguration");
        }

        /* Clear any error since this was optional. */
        result = SL_RESULT_SUCCESS;
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mRecordObj,Realize)(SL_BOOLEAN_FALSE);
        PRINTERR(result, "recordObj->Realize");
    }

    SLAndroidSimpleBufferQueueItf bufferQueue;
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(mRecordObj,GetInterface)(SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &bufferQueue);
        PRINTERR(result, "recordObj->GetInterface");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        result = VCALL(bufferQueue,RegisterCallback)(&OpenSLCapture::processC, this);
        PRINTERR(result, "bufferQueue->RegisterCallback");
    }
    if(SL_RESULT_SUCCESS == result)
    {
        /* Enqueue every chunk of the (possibly wrapped) write vector so the
         * device has the whole ring available to record into.
         */
        const uint chunk_size{mDevice->UpdateSize * mFrameSize};
        auto data = mRing->getWriteVector();
        for(size_t i{0u};i < data.first.len && SL_RESULT_SUCCESS == result;i++)
        {
            result = VCALL(bufferQueue,Enqueue)(data.first.buf + chunk_size*i, chunk_size);
            PRINTERR(result, "bufferQueue->Enqueue");
        }
        for(size_t i{0u};i < data.second.len && SL_RESULT_SUCCESS == result;i++)
        {
            result = VCALL(bufferQueue,Enqueue)(data.second.buf + chunk_size*i, chunk_size);
            PRINTERR(result, "bufferQueue->Enqueue");
        }
    }

    if(SL_RESULT_SUCCESS != result)
    {
        /* Tear down anything partially created before reporting failure. */
        if(mRecordObj)
            VCALL0(mRecordObj,Destroy)();
        mRecordObj = nullptr;

        if(mEngineObj)
            VCALL0(mEngineObj,Destroy)();
        mEngineObj = nullptr;
        mEngine = nullptr;

        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to initialize OpenSL device: 0x%08x", result};
    }

    mDevice->DeviceName = name;
}
void OpenSLCapture::start()
{
SLRecordItf record;
SLresult result{VCALL(mRecordObj,GetInterface)(SL_IID_RECORD, &record)};
PRINTERR(result, "recordObj->GetInterface");
if(SL_RESULT_SUCCESS == result)
{
result = VCALL(record,SetRecordState)(SL_RECORDSTATE_RECORDING);
PRINTERR(result, "record->SetRecordState");
}
if(SL_RESULT_SUCCESS != result)
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start capture: 0x%08x", result};
}
void OpenSLCapture::stop()
{
SLRecordItf record;
SLresult result{VCALL(mRecordObj,GetInterface)(SL_IID_RECORD, &record)};
PRINTERR(result, "recordObj->GetInterface");
if(SL_RESULT_SUCCESS == result)
{
result = VCALL(record,SetRecordState)(SL_RECORDSTATE_PAUSED);
PRINTERR(result, "record->SetRecordState");
}
}
/** Copy up to `samples` sample frames out of the ring buffer into `buffer`,
 * then hand the freed chunks back to the device for re-recording.
 */
void OpenSLCapture::captureSamples(al::byte *buffer, uint samples)
{
    const uint update_size{mDevice->UpdateSize};
    const uint chunk_size{update_size * mFrameSize};

    /* Read the desired samples from the ring buffer then advance its read
     * pointer.
     */
    size_t adv_count{0};
    auto rdata = mRing->getReadVector();
    for(uint i{0};i < samples;)
    {
        /* Copy what's left of the current chunk, up to the caller's request.
         * mSplOffset tracks how far into the chunk previous calls got.
         */
        const uint rem{minu(samples - i, update_size - mSplOffset)};
        std::copy_n(rdata.first.buf + mSplOffset*size_t{mFrameSize}, rem*size_t{mFrameSize},
            buffer + i*size_t{mFrameSize});

        mSplOffset += rem;
        if(mSplOffset == update_size)
        {
            /* Finished a chunk, reset the offset and advance the read pointer. */
            mSplOffset = 0;

            ++adv_count;
            rdata.first.len -= 1;
            if(!rdata.first.len)
                rdata.first = rdata.second;
            else
                rdata.first.buf += chunk_size;
        }

        i += rem;
    }
    mRing->readAdvance(adv_count);

    /* Re-fetch the buffer queue interface so the consumed chunks can be
     * re-enqueued; a failure here marks the device disconnected.
     */
    SLAndroidSimpleBufferQueueItf bufferQueue{};
    if LIKELY(mDevice->Connected.load(std::memory_order_acquire))
    {
        const SLresult result{VCALL(mRecordObj,GetInterface)(SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
            &bufferQueue)};
        PRINTERR(result, "recordObj->GetInterface");
        if UNLIKELY(SL_RESULT_SUCCESS != result)
        {
            mDevice->handleDisconnect("Failed to get capture buffer queue: 0x%08x", result);
            bufferQueue = nullptr;
        }
    }
    if LIKELY(bufferQueue)
    {
        /* Enqueue every chunk of the (possibly wrapped) write vector. */
        SLresult result{SL_RESULT_SUCCESS};
        auto wdata = mRing->getWriteVector();
        for(size_t i{0u};i < wdata.first.len && SL_RESULT_SUCCESS == result;i++)
        {
            result = VCALL(bufferQueue,Enqueue)(wdata.first.buf + chunk_size*i, chunk_size);
            PRINTERR(result, "bufferQueue->Enqueue");
        }
        for(size_t i{0u};i < wdata.second.len && SL_RESULT_SUCCESS == result;i++)
        {
            result = VCALL(bufferQueue,Enqueue)(wdata.second.buf + chunk_size*i, chunk_size);
            PRINTERR(result, "bufferQueue->Enqueue");
        }
    }
}
/* Full chunks in the ring, in samples, minus what's already been read out of
 * the current partial chunk.
 */
uint OpenSLCapture::availableSamples()
{ return static_cast<uint>(mRing->readSpace()*mDevice->UpdateSize - mSplOffset); }
} // namespace
bool OSLBackendFactory::init() { return true; }
/* Both playback and capture are implemented by this backend. */
bool OSLBackendFactory::querySupport(BackendType type)
{ return (type == BackendType::Playback || type == BackendType::Capture); }
/** Return the list of device names for the given backend type. Only the
 * single default OpenSL device exists, for both playback and capture.
 */
std::string OSLBackendFactory::probe(BackendType type)
{
    std::string names;
    if(type == BackendType::Playback || type == BackendType::Capture)
    {
        /* Includes null char. */
        names.append(opensl_device, sizeof(opensl_device));
    }
    return names;
}
/** Instantiate a playback or capture backend for the device, or nullptr for
 * unsupported types.
 */
BackendPtr OSLBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
        return BackendPtr{new OpenSLPlayback{device}};
    case BackendType::Capture:
        return BackendPtr{new OpenSLCapture{device}};
    default:
        break;
    }
    return nullptr;
}
/* Singleton accessor; the factory has no state so a function-local static
 * suffices.
 */
BackendFactory &OSLBackendFactory::getFactory()
{
    static OSLBackendFactory factory{};
    return factory;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_OSL_H
#define BACKENDS_OSL_H

#include "backends/base.h"

/* Factory for the OpenSL ES (Android) playback/capture backend. */
struct OSLBackendFactory final : public BackendFactory {
public:
    bool init() override;

    bool querySupport(BackendType type) override;

    std::string probe(BackendType type) override;

    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Singleton accessor. */
    static BackendFactory &getFactory();
};

#endif /* BACKENDS_OSL_H */

View file

@ -1,878 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <memory.h>
#include <unistd.h>
#include <errno.h>
#include <math.h>
#include "alMain.h"
#include "alu.h"
#include "alconfig.h"
#include "ringbuffer.h"
#include "threads.h"
#include "compat.h"
#include "backends/base.h"
#include <sys/soundcard.h>
/*
* The OSS documentation talks about SOUND_MIXER_READ, but the header
* only contains MIXER_READ. Play safe. Same for WRITE.
*/
#ifndef SOUND_MIXER_READ
#define SOUND_MIXER_READ MIXER_READ
#endif
#ifndef SOUND_MIXER_WRITE
#define SOUND_MIXER_WRITE MIXER_WRITE
#endif
#if defined(SOUND_VERSION) && (SOUND_VERSION < 0x040000)
#define ALC_OSS_COMPAT
#endif
#ifndef SNDCTL_AUDIOINFO
#define ALC_OSS_COMPAT
#endif
/*
* FreeBSD strongly discourages the use of specific devices,
* such as those returned in oss_audioinfo.devnode
*/
#ifdef __FreeBSD__
#define ALC_OSS_DEVNODE_TRUC
#endif
/* Singly-linked list node for an enumerated OSS device. */
struct oss_device {
    const ALCchar *handle; /* User-visible device name. */
    const char *path;      /* Device node path, e.g. "/dev/dsp". */
    struct oss_device *next;
};
/* Static list heads; the default device is always present, and enumerated
 * devices are appended after it by ALCossListPopulate.
 */
static struct oss_device oss_playback = {
    "OSS Default",
    "/dev/dsp",
    NULL
};

static struct oss_device oss_capture = {
    "OSS Default",
    "/dev/dsp",
    NULL
};
#ifdef ALC_OSS_COMPAT
#define DSP_CAP_OUTPUT 0x00020000
#define DSP_CAP_INPUT 0x00010000
/* OSSv3 compat build: no SNDCTL_AUDIOINFO, so device enumeration is a no-op
 * and only the static default device is available.
 */
static void ALCossListPopulate(struct oss_device *UNUSED(devlist), int UNUSED(type_flag))
{
}
#else
#ifndef HAVE_STRNLEN
/* Fallback strnlen: length of str, but never more than maxlen. */
static size_t strnlen(const char *str, size_t maxlen)
{
    size_t len = 0;
    while(len < maxlen && str[len] != '\0')
        len++;
    return len;
}
#endif
/* Append a device (handle string of length hlen, path of length plen) to the
 * list, skipping duplicates. The node and both strings are carved out of one
 * malloc'd allocation.
 */
static void ALCossListAppend(struct oss_device *list, const char *handle, size_t hlen, const char *path, size_t plen)
{
    struct oss_device *next;
    struct oss_device *last;
    size_t i;

    /* skip the first item "OSS Default" */
    last = list;
    next = list->next;
#ifdef ALC_OSS_DEVNODE_TRUC
    /* FreeBSD: truncate the devnode at a '.' suffix, and trim the matching
     * suffix off the handle if it ends the same way.
     */
    for(i = 0;i < plen;i++)
    {
        if(path[i] == '.')
        {
            if(strncmp(path + i, handle + hlen + i - plen, plen - i) == 0)
                hlen = hlen + i - plen;
            plen = i;
        }
    }
#else
    (void)i;
#endif
    /* Fall back to the path as the display name if the handle is empty. */
    if(handle[0] == '\0')
    {
        handle = path;
        hlen = plen;
    }
    /* Skip devices whose path is already listed. */
    while(next != NULL)
    {
        if(strncmp(next->path, path, plen) == 0)
            return;
        last = next;
        next = next->next;
    }

    /* One allocation: node, then handle string, then path string (each
     * NUL-terminated).
     */
    next = (struct oss_device*)malloc(sizeof(struct oss_device) + hlen + plen + 2);
    next->handle = (char*)(next + 1);
    next->path = next->handle + hlen + 1;
    next->next = NULL;
    last->next = next;

    strncpy((char*)next->handle, handle, hlen);
    ((char*)next->handle)[hlen] = '\0';
    strncpy((char*)next->path, path, plen);
    ((char*)next->path)[plen] = '\0';

    TRACE("Got device \"%s\", \"%s\"\n", next->handle, next->path);
}
/* Enumerate OSSv4 audio devices through /dev/mixer and append those matching
 * type_flag (DSP_CAP_OUTPUT or DSP_CAP_INPUT) to devlist.
 */
static void ALCossListPopulate(struct oss_device *devlist, int type_flag)
{
    struct oss_sysinfo si;
    struct oss_audioinfo ai;
    int fd, i;

    if((fd=open("/dev/mixer", O_RDONLY)) < 0)
    {
        TRACE("Could not open /dev/mixer: %s\n", strerror(errno));
        return;
    }
    if(ioctl(fd, SNDCTL_SYSINFO, &si) == -1)
    {
        TRACE("SNDCTL_SYSINFO failed: %s\n", strerror(errno));
        goto done;
    }
    for(i = 0;i < si.numaudios;i++)
    {
        const char *handle;
        size_t len;

        ai.dev = i;
        if(ioctl(fd, SNDCTL_AUDIOINFO, &ai) == -1)
        {
            ERR("SNDCTL_AUDIOINFO (%d) failed: %s\n", i, strerror(errno));
            continue;
        }
        /* Skip entries without a device node. */
        if(ai.devnode[0] == '\0')
            continue;

        /* Prefer the handle as display name, falling back to the name. */
        if(ai.handle[0] != '\0')
        {
            len = strnlen(ai.handle, sizeof(ai.handle));
            handle = ai.handle;
        }
        else
        {
            len = strnlen(ai.name, sizeof(ai.name));
            handle = ai.name;
        }
        if((ai.caps&type_flag))
            ALCossListAppend(devlist, handle, len, ai.devnode,
                             strnlen(ai.devnode, sizeof(ai.devnode)));
    }

done:
    close(fd);
}
#endif
/* Free every dynamically-added entry of a device list, leaving only the
 * static "OSS Default" head element.
 */
static void ALCossListFree(struct oss_device *list)
{
    struct oss_device *node;

    if(!list)
        return;

    /* The head is static; only nodes linked after it were malloc'd (by
     * ALCossListAppend). Detach the chain, then free it node by node.
     */
    node = list->next;
    list->next = NULL;
    while(node)
    {
        struct oss_device *tmp = node->next;
        free(node);
        node = tmp;
    }
}
/* Integer base-2 logarithm (floor); log2i(0) and log2i(1) are both 0. */
static int log2i(ALCuint x)
{
    int result = 0;
    for(;x > 1;x >>= 1)
        result++;
    return result;
}
/* OSS playback backend state. */
typedef struct ALCplaybackOSS {
    DERIVE_FROM_TYPE(ALCbackend);

    int fd;                 /* Open file descriptor for the dsp device. */

    ALubyte *mix_data;      /* One update's worth of mixed samples. */
    int data_size;          /* Size of mix_data in bytes. */

    ATOMIC(ALenum) killNow; /* Tells the mixer thread to exit. */
    althrd_t thread;        /* Mixer thread handle. */
} ALCplaybackOSS;
/* Forward declarations and vtable wiring for the playback backend; capture
 * methods and latency queries forward to the ALCbackend defaults.
 */
static int ALCplaybackOSS_mixerProc(void *ptr);

static void ALCplaybackOSS_Construct(ALCplaybackOSS *self, ALCdevice *device);
static void ALCplaybackOSS_Destruct(ALCplaybackOSS *self);
static ALCenum ALCplaybackOSS_open(ALCplaybackOSS *self, const ALCchar *name);
static ALCboolean ALCplaybackOSS_reset(ALCplaybackOSS *self);
static ALCboolean ALCplaybackOSS_start(ALCplaybackOSS *self);
static void ALCplaybackOSS_stop(ALCplaybackOSS *self);
static DECLARE_FORWARD2(ALCplaybackOSS, ALCbackend, ALCenum, captureSamples, ALCvoid*, ALCuint)
static DECLARE_FORWARD(ALCplaybackOSS, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCplaybackOSS, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCplaybackOSS, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCplaybackOSS, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCplaybackOSS)

DEFINE_ALCBACKEND_VTABLE(ALCplaybackOSS);
/* Mixer thread: waits for the device to accept data, mixes one update, and
 * writes it out; loops until killNow is set or the device disconnects.
 */
static int ALCplaybackOSS_mixerProc(void *ptr)
{
    ALCplaybackOSS *self = (ALCplaybackOSS*)ptr;
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    struct timeval timeout;
    ALubyte *write_ptr;
    ALint frame_size;
    ALint to_write;
    ssize_t wrote;
    fd_set wfds;
    int sret;

    SetRTPriority();
    althrd_setname(althrd_current(), MIXER_THREAD_NAME);

    frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);

    ALCplaybackOSS_lock(self);
    while(!ATOMIC_LOAD(&self->killNow, almemory_order_acquire) &&
          ATOMIC_LOAD(&device->Connected, almemory_order_acquire))
    {
        FD_ZERO(&wfds);
        FD_SET(self->fd, &wfds);
        timeout.tv_sec = 1;
        timeout.tv_usec = 0;

        /* Drop the backend lock while blocked in select() so other threads
         * can make progress.
         */
        ALCplaybackOSS_unlock(self);
        sret = select(self->fd+1, NULL, &wfds, NULL, &timeout);
        ALCplaybackOSS_lock(self);
        if(sret < 0)
        {
            if(errno == EINTR)
                continue;
            ERR("select failed: %s\n", strerror(errno));
            aluHandleDisconnect(device, "Failed waiting for playback buffer: %s", strerror(errno));
            break;
        }
        else if(sret == 0)
        {
            WARN("select timeout\n");
            continue;
        }

        /* Mix one update's worth of samples and write it, retrying on
         * interruptions and short writes.
         */
        write_ptr = self->mix_data;
        to_write = self->data_size;
        aluMixData(device, write_ptr, to_write/frame_size);
        while(to_write > 0 && !ATOMIC_LOAD_SEQ(&self->killNow))
        {
            wrote = write(self->fd, write_ptr, to_write);
            if(wrote < 0)
            {
                if(errno == EAGAIN || errno == EWOULDBLOCK || errno == EINTR)
                    continue;
                ERR("write failed: %s\n", strerror(errno));
                aluHandleDisconnect(device, "Failed writing playback samples: %s",
                                    strerror(errno));
                break;
            }

            to_write -= wrote;
            write_ptr += wrote;
        }
    }
    ALCplaybackOSS_unlock(self);

    return 0;
}
static void ALCplaybackOSS_Construct(ALCplaybackOSS *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCplaybackOSS, ALCbackend, self);

    /* -1 marks the device as not yet opened. */
    self->fd = -1;
    ATOMIC_INIT(&self->killNow, AL_FALSE);
}
static void ALCplaybackOSS_Destruct(ALCplaybackOSS *self)
{
    /* Close the device fd if it was opened. */
    if(self->fd != -1)
        close(self->fd);
    self->fd = -1;

    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* Open the named playback device (default if name is NULL), enumerating the
 * device list on first use for non-default names.
 */
static ALCenum ALCplaybackOSS_open(ALCplaybackOSS *self, const ALCchar *name)
{
    struct oss_device *dev = &oss_playback;
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;

    if(!name || strcmp(name, dev->handle) == 0)
        name = dev->handle;
    else
    {
        /* Populate the device list lazily, then search it by handle. */
        if(!dev->next)
        {
            ALCossListPopulate(&oss_playback, DSP_CAP_OUTPUT);
            dev = &oss_playback;
        }
        while(dev != NULL)
        {
            if (strcmp(dev->handle, name) == 0)
                break;
            dev = dev->next;
        }
        if(dev == NULL)
        {
            WARN("Could not find \"%s\" in device list\n", name);
            return ALC_INVALID_VALUE;
        }
    }

    self->fd = open(dev->path, O_WRONLY);
    if(self->fd == -1)
    {
        ERR("Could not open %s: %s\n", dev->path, strerror(errno));
        return ALC_INVALID_VALUE;
    }

    alstr_copy_cstr(&device->DeviceName, name);

    return ALC_NO_ERROR;
}
/* Configure the opened dsp device to (approximately) the requested format,
 * then write the actually-obtained parameters back to the device struct.
 */
static ALCboolean ALCplaybackOSS_reset(ALCplaybackOSS *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    int numFragmentsLogSize;
    int log2FragmentSize;
    unsigned int periods;
    audio_buf_info info;
    ALuint frameSize;
    int numChannels;
    int ossFormat;
    int ossSpeed;
    char *err;

    /* Map the device sample type to an OSS format, coercing unsupported
     * types to 16-bit signed.
     */
    switch(device->FmtType)
    {
        case DevFmtByte:
            ossFormat = AFMT_S8;
            break;
        case DevFmtUByte:
            ossFormat = AFMT_U8;
            break;
        case DevFmtUShort:
        case DevFmtInt:
        case DevFmtUInt:
        case DevFmtFloat:
            device->FmtType = DevFmtShort;
            /* fall-through */
        case DevFmtShort:
            ossFormat = AFMT_S16_NE;
            break;
    }

    periods = device->NumUpdates;
    numChannels = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
    ossSpeed = device->Frequency;
    frameSize = numChannels * BytesFromDevFmt(device->FmtType);
    /* According to the OSS spec, 16 bytes (log2(16)) is the minimum. */
    log2FragmentSize = maxi(log2i(device->UpdateSize*frameSize), 4);
    /* High word: fragment count; low word: log2 of the fragment size. */
    numFragmentsLogSize = (periods << 16) | log2FragmentSize;

    /* On ioctl failure, record the failing expression and jump to the error
     * report below.
     */
#define CHECKERR(func) if((func) < 0) {                                       \
    err = #func;                                                              \
    goto err;                                                                 \
}
    /* Don't fail if SETFRAGMENT fails. We can handle just about anything
     * that's reported back via GETOSPACE */
    ioctl(self->fd, SNDCTL_DSP_SETFRAGMENT, &numFragmentsLogSize);
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_SETFMT, &ossFormat));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_CHANNELS, &numChannels));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_SPEED, &ossSpeed));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_GETOSPACE, &info));
    if(0)
    {
    err:
        ERR("%s failed: %s\n", err, strerror(errno));
        return ALC_FALSE;
    }
#undef CHECKERR

    /* Verify the driver accepted our channel count and sample format. */
    if((int)ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder) != numChannels)
    {
        ERR("Failed to set %s, got %d channels instead\n", DevFmtChannelsString(device->FmtChans), numChannels);
        return ALC_FALSE;
    }

    if(!((ossFormat == AFMT_S8 && device->FmtType == DevFmtByte) ||
         (ossFormat == AFMT_U8 && device->FmtType == DevFmtUByte) ||
         (ossFormat == AFMT_S16_NE && device->FmtType == DevFmtShort)))
    {
        ERR("Failed to set %s samples, got OSS format %#x\n", DevFmtTypeString(device->FmtType), ossFormat);
        return ALC_FALSE;
    }

    /* Adopt the rate and buffering the driver actually gave us. */
    device->Frequency = ossSpeed;
    device->UpdateSize = info.fragsize / frameSize;
    device->NumUpdates = info.fragments;

    SetDefaultChannelOrder(device);

    return ALC_TRUE;
}
/* Allocate the mix buffer and launch the mixer thread. Returns ALC_FALSE if
 * either the allocation or the thread creation fails.
 */
static ALCboolean ALCplaybackOSS_start(ALCplaybackOSS *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;

    self->data_size = device->UpdateSize * FrameSizeFromDevFmt(
        device->FmtChans, device->FmtType, device->AmbiOrder
    );
    self->mix_data = calloc(1, self->data_size);
    /* Fix: calloc can fail; previously a NULL buffer was handed to the mixer
     * thread, which would crash on the first mix.
     */
    if(!self->mix_data)
    {
        ERR("Failed to allocate mix buffer (%d bytes)\n", self->data_size);
        return ALC_FALSE;
    }

    ATOMIC_STORE_SEQ(&self->killNow, AL_FALSE);
    if(althrd_create(&self->thread, ALCplaybackOSS_mixerProc, self) != althrd_success)
    {
        free(self->mix_data);
        self->mix_data = NULL;
        return ALC_FALSE;
    }

    return ALC_TRUE;
}
/* Signal the mixer thread to exit, join it, reset the device, and release
 * the mix buffer. Safe to call when already stopped.
 */
static void ALCplaybackOSS_stop(ALCplaybackOSS *self)
{
    int res;

    if(ATOMIC_EXCHANGE_SEQ(&self->killNow, AL_TRUE))
        return;
    althrd_join(self->thread, &res);

    if(ioctl(self->fd, SNDCTL_DSP_RESET) != 0)
        ERR("Error resetting device: %s\n", strerror(errno));

    free(self->mix_data);
    self->mix_data = NULL;
}
/* OSS capture backend state. */
typedef struct ALCcaptureOSS {
    DERIVE_FROM_TYPE(ALCbackend);

    int fd;                 /* Open file descriptor for the dsp device. */

    ll_ringbuffer_t *ring;  /* Captured samples waiting to be read. */

    ATOMIC(ALenum) killNow; /* Tells the record thread to exit. */
    althrd_t thread;        /* Record thread handle. */
} ALCcaptureOSS;
/* Forward declarations and vtable wiring for the capture backend; reset and
 * latency queries forward to the ALCbackend defaults.
 */
static int ALCcaptureOSS_recordProc(void *ptr);

static void ALCcaptureOSS_Construct(ALCcaptureOSS *self, ALCdevice *device);
static void ALCcaptureOSS_Destruct(ALCcaptureOSS *self);
static ALCenum ALCcaptureOSS_open(ALCcaptureOSS *self, const ALCchar *name);
static DECLARE_FORWARD(ALCcaptureOSS, ALCbackend, ALCboolean, reset)
static ALCboolean ALCcaptureOSS_start(ALCcaptureOSS *self);
static void ALCcaptureOSS_stop(ALCcaptureOSS *self);
static ALCenum ALCcaptureOSS_captureSamples(ALCcaptureOSS *self, ALCvoid *buffer, ALCuint samples);
static ALCuint ALCcaptureOSS_availableSamples(ALCcaptureOSS *self);
static DECLARE_FORWARD(ALCcaptureOSS, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCcaptureOSS, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCcaptureOSS, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCcaptureOSS)

DEFINE_ALCBACKEND_VTABLE(ALCcaptureOSS);
/* Record thread: waits for samples to become readable and reads them into
 * the ring buffer until killNow is set or the device disconnects.
 */
static int ALCcaptureOSS_recordProc(void *ptr)
{
    ALCcaptureOSS *self = (ALCcaptureOSS*)ptr;
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    struct timeval timeout;
    int frame_size;
    fd_set rfds;
    ssize_t amt;
    int sret;

    SetRTPriority();
    althrd_setname(althrd_current(), RECORD_THREAD_NAME);

    frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);

    while(!ATOMIC_LOAD_SEQ(&self->killNow))
    {
        ll_ringbuffer_data_t vec[2];

        FD_ZERO(&rfds);
        FD_SET(self->fd, &rfds);
        timeout.tv_sec = 1;
        timeout.tv_usec = 0;

        /* Wait (up to 1s) for the device to have readable samples. */
        sret = select(self->fd+1, &rfds, NULL, NULL, &timeout);
        if(sret < 0)
        {
            if(errno == EINTR)
                continue;
            ERR("select failed: %s\n", strerror(errno));
            aluHandleDisconnect(device, "Failed to check capture samples: %s", strerror(errno));
            break;
        }
        else if(sret == 0)
        {
            WARN("select timeout\n");
            continue;
        }

        /* Read into the first contiguous span of the ring's write space. */
        ll_ringbuffer_get_write_vector(self->ring, vec);
        if(vec[0].len > 0)
        {
            amt = read(self->fd, vec[0].buf, vec[0].len*frame_size);
            if(amt < 0)
            {
                ERR("read failed: %s\n", strerror(errno));
                ALCcaptureOSS_lock(self);
                aluHandleDisconnect(device, "Failed reading capture samples: %s", strerror(errno));
                ALCcaptureOSS_unlock(self);
                break;
            }
            ll_ringbuffer_write_advance(self->ring, amt/frame_size);
        }
    }

    return 0;
}
static void ALCcaptureOSS_Construct(ALCcaptureOSS *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCcaptureOSS, ALCbackend, self);

    /* -1 marks the device as not yet opened. */
    self->fd = -1;
    self->ring = NULL;
    ATOMIC_INIT(&self->killNow, AL_FALSE);
}
static void ALCcaptureOSS_Destruct(ALCcaptureOSS *self)
{
    /* Close the device fd and free the capture ring buffer. */
    if(self->fd != -1)
        close(self->fd);
    self->fd = -1;

    ll_ringbuffer_free(self->ring);
    self->ring = NULL;
    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* Open and configure the named capture device (default if name is NULL).
 * Unlike playback, the format must match exactly; no coercion is done.
 */
static ALCenum ALCcaptureOSS_open(ALCcaptureOSS *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    struct oss_device *dev = &oss_capture;
    int numFragmentsLogSize;
    int log2FragmentSize;
    unsigned int periods;
    audio_buf_info info;
    ALuint frameSize;
    int numChannels;
    int ossFormat;
    int ossSpeed;
    char *err;

    if(!name || strcmp(name, dev->handle) == 0)
        name = dev->handle;
    else
    {
        /* Populate the device list lazily, then search it by handle. */
        if(!dev->next)
        {
            ALCossListPopulate(&oss_capture, DSP_CAP_INPUT);
            dev = &oss_capture;
        }
        while(dev != NULL)
        {
            if (strcmp(dev->handle, name) == 0)
                break;
            dev = dev->next;
        }
        if(dev == NULL)
        {
            WARN("Could not find \"%s\" in device list\n", name);
            return ALC_INVALID_VALUE;
        }
    }

    self->fd = open(dev->path, O_RDONLY);
    if(self->fd == -1)
    {
        ERR("Could not open %s: %s\n", dev->path, strerror(errno));
        return ALC_INVALID_VALUE;
    }

    /* Only 8-bit (signed/unsigned) and 16-bit signed captures are supported. */
    switch(device->FmtType)
    {
        case DevFmtByte:
            ossFormat = AFMT_S8;
            break;
        case DevFmtUByte:
            ossFormat = AFMT_U8;
            break;
        case DevFmtShort:
            ossFormat = AFMT_S16_NE;
            break;
        case DevFmtUShort:
        case DevFmtInt:
        case DevFmtUInt:
        case DevFmtFloat:
            ERR("%s capture samples not supported\n", DevFmtTypeString(device->FmtType));
            return ALC_INVALID_VALUE;
    }

    periods = 4;
    numChannels = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
    frameSize = numChannels * BytesFromDevFmt(device->FmtType);
    ossSpeed = device->Frequency;
    /* Split the total buffer into `periods` fragments. */
    log2FragmentSize = log2i(device->UpdateSize * device->NumUpdates *
                             frameSize / periods);

    /* according to the OSS spec, 16 bytes are the minimum */
    if (log2FragmentSize < 4)
        log2FragmentSize = 4;
    numFragmentsLogSize = (periods << 16) | log2FragmentSize;

    /* On ioctl failure, record the failing expression and jump to the error
     * report below.
     */
#define CHECKERR(func) if((func) < 0) {                                       \
    err = #func;                                                              \
    goto err;                                                                 \
}
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_SETFRAGMENT, &numFragmentsLogSize));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_SETFMT, &ossFormat));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_CHANNELS, &numChannels));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_SPEED, &ossSpeed));
    CHECKERR(ioctl(self->fd, SNDCTL_DSP_GETISPACE, &info));
    if(0)
    {
    err:
        ERR("%s failed: %s\n", err, strerror(errno));
        close(self->fd);
        self->fd = -1;
        return ALC_INVALID_VALUE;
    }
#undef CHECKERR

    /* Reject the device if the driver didn't accept the exact channel count
     * or sample format.
     */
    if((int)ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder) != numChannels)
    {
        ERR("Failed to set %s, got %d channels instead\n", DevFmtChannelsString(device->FmtChans), numChannels);
        close(self->fd);
        self->fd = -1;
        return ALC_INVALID_VALUE;
    }

    if(!((ossFormat == AFMT_S8 && device->FmtType == DevFmtByte) ||
         (ossFormat == AFMT_U8 && device->FmtType == DevFmtUByte) ||
         (ossFormat == AFMT_S16_NE && device->FmtType == DevFmtShort)))
    {
        ERR("Failed to set %s samples, got OSS format %#x\n", DevFmtTypeString(device->FmtType), ossFormat);
        close(self->fd);
        self->fd = -1;
        return ALC_INVALID_VALUE;
    }

    self->ring = ll_ringbuffer_create(device->UpdateSize*device->NumUpdates, frameSize, false);
    if(!self->ring)
    {
        ERR("Ring buffer create failed\n");
        close(self->fd);
        self->fd = -1;
        return ALC_OUT_OF_MEMORY;
    }

    alstr_copy_cstr(&device->DeviceName, name);

    return ALC_NO_ERROR;
}
/* Clear the kill flag, then launch the record thread. */
static ALCboolean ALCcaptureOSS_start(ALCcaptureOSS *self)
{
    ATOMIC_STORE_SEQ(&self->killNow, AL_FALSE);
    if(althrd_create(&self->thread, ALCcaptureOSS_recordProc, self) == althrd_success)
        return ALC_TRUE;
    return ALC_FALSE;
}
/* Signal the record thread to exit, wait for it, then reset the device. */
static void ALCcaptureOSS_stop(ALCcaptureOSS *self)
{
    int thread_res;

    /* If the kill flag was already set there is no running thread to stop. */
    if(ATOMIC_EXCHANGE_SEQ(&self->killNow, AL_TRUE) != AL_FALSE)
        return;
    althrd_join(self->thread, &thread_res);

    /* Discard any capture data still queued in the driver. */
    if(ioctl(self->fd, SNDCTL_DSP_RESET) != 0)
        ERR("Error resetting device: %s\n", strerror(errno));
}
/* Copy 'samples' frames from the ring buffer (filled by the record thread)
 * into 'buffer'. Always reports success. */
static ALCenum ALCcaptureOSS_captureSamples(ALCcaptureOSS *self, ALCvoid *buffer, ALCuint samples)
{
    ll_ringbuffer_read(self->ring, buffer, samples);
    return ALC_NO_ERROR;
}
/* Number of captured frames currently buffered and ready to read. */
static ALCuint ALCcaptureOSS_availableSamples(ALCcaptureOSS *self)
{
    return ll_ringbuffer_read_space(self->ring);
}
/* OSS backend factory: derives from the generic ALCbackendFactory via the
 * DERIVE_FROM_TYPE macro-based "inheritance" scheme. */
typedef struct ALCossBackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCossBackendFactory;
/* Static initializer wiring up the factory vtable. */
#define ALCOSSBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCossBackendFactory, ALCbackendFactory) } }

ALCbackendFactory *ALCossBackendFactory_getFactory(void);

/* Factory method declarations and vtable definition. */
static ALCboolean ALCossBackendFactory_init(ALCossBackendFactory *self);
static void ALCossBackendFactory_deinit(ALCossBackendFactory *self);
static ALCboolean ALCossBackendFactory_querySupport(ALCossBackendFactory *self, ALCbackend_Type type);
static void ALCossBackendFactory_probe(ALCossBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCossBackendFactory_createBackend(ALCossBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCossBackendFactory);
/* Return the process-wide OSS factory instance, upcast to the base type. */
ALCbackendFactory *ALCossBackendFactory_getFactory(void)
{
    static ALCossBackendFactory oss_factory = ALCOSSBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &oss_factory);
}
/* Let the config file override the default playback/capture device nodes. */
ALCboolean ALCossBackendFactory_init(ALCossBackendFactory* UNUSED(self))
{
    ConfigValueStr(NULL, "oss", "capture", &oss_capture.path);
    ConfigValueStr(NULL, "oss", "device", &oss_playback.path);
    return ALC_TRUE;
}
/* Release both enumerated device lists. */
void ALCossBackendFactory_deinit(ALCossBackendFactory* UNUSED(self))
{
    ALCossListFree(&oss_capture);
    ALCossListFree(&oss_playback);
}
/* OSS implements both playback and capture backends. */
ALCboolean ALCossBackendFactory_querySupport(ALCossBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    if(type != ALCbackend_Playback && type != ALCbackend_Capture)
        return ALC_FALSE;
    return ALC_TRUE;
}
/* Re-enumerate devices of the requested kind, appending each entry to the
 * global device list. When stat() is available, entries whose device node
 * no longer exists are skipped. */
void ALCossBackendFactory_probe(ALCossBackendFactory* UNUSED(self), enum DevProbe type)
{
    struct oss_device *cur;
    switch(type)
    {
        case ALL_DEVICE_PROBE:
            ALCossListFree(&oss_playback);
            ALCossListPopulate(&oss_playback, DSP_CAP_OUTPUT);
            for(cur = &oss_playback;cur != NULL;cur = cur->next)
            {
#ifdef HAVE_STAT
                struct stat buf;
                if(stat(cur->path, &buf) == 0)
#endif
                    AppendAllDevicesList(cur->handle);
            }
            break;

        case CAPTURE_DEVICE_PROBE:
            ALCossListFree(&oss_capture);
            ALCossListPopulate(&oss_capture, DSP_CAP_INPUT);
            for(cur = &oss_capture;cur != NULL;cur = cur->next)
            {
#ifdef HAVE_STAT
                struct stat buf;
                if(stat(cur->path, &buf) == 0)
#endif
                    AppendCaptureDeviceList(cur->handle);
            }
            break;
    }
}
/* Instantiate the requested backend type; returns NULL for unsupported
 * types or on allocation failure. */
ALCbackend* ALCossBackendFactory_createBackend(ALCossBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
    {
        ALCplaybackOSS *playback;
        NEW_OBJ(playback, ALCplaybackOSS)(device);
        if(!playback) return NULL;
        return STATIC_CAST(ALCbackend, playback);
    }
    if(type == ALCbackend_Capture)
    {
        ALCcaptureOSS *capture;
        NEW_OBJ(capture, ALCcaptureOSS)(device);
        if(!capture) return NULL;
        return STATIC_CAST(ALCbackend, capture);
    }

    return NULL;
}

View file

@ -0,0 +1,686 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/oss.h"
#include <fcntl.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <unistd.h>
#include <algorithm>
#include <atomic>
#include <cerrno>
#include <cstdio>
#include <cstring>
#include <exception>
#include <functional>
#include <memory>
#include <new>
#include <string>
#include <thread>
#include <utility>
#include "alcmain.h"
#include "alconfig.h"
#include "albyte.h"
#include "almalloc.h"
#include "alnumeric.h"
#include "aloptional.h"
#include "alu.h"
#include "core/logging.h"
#include "ringbuffer.h"
#include "threads.h"
#include "vector.h"
#include <sys/soundcard.h>
/*
* The OSS documentation talks about SOUND_MIXER_READ, but the header
* only contains MIXER_READ. Play safe. Same for WRITE.
*/
#ifndef SOUND_MIXER_READ
#define SOUND_MIXER_READ MIXER_READ
#endif
#ifndef SOUND_MIXER_WRITE
#define SOUND_MIXER_WRITE MIXER_WRITE
#endif
#if defined(SOUND_VERSION) && (SOUND_VERSION < 0x040000)
#define ALC_OSS_COMPAT
#endif
#ifndef SNDCTL_AUDIOINFO
#define ALC_OSS_COMPAT
#endif
/*
* FreeBSD strongly discourages the use of specific devices,
* such as those returned in oss_audioinfo.devnode
*/
#ifdef __FreeBSD__
#define ALC_OSS_DEVNODE_TRUC
#endif
namespace {
/* Display name used for the default playback/capture device. */
constexpr char DefaultName[] = "OSS Default";
/* Device nodes for the defaults; overridable via the "oss" config section
 * (see OSSBackendFactory::init). */
std::string DefaultPlayback{"/dev/dsp"};
std::string DefaultCapture{"/dev/dsp"};

/* Maps a user-visible device name to its /dev node. */
struct DevMap {
    std::string name;
    std::string device_name;
};

/* Enumeration results, filled on demand by ALCossListPopulate. */
al::vector<DevMap> PlaybackDevices;
al::vector<DevMap> CaptureDevices;
#ifdef ALC_OSS_COMPAT

/* Pre-v4 OSS lacks the SNDCTL_SYSINFO/SNDCTL_AUDIOINFO enumeration ioctls,
 * so define the capability flags locally and expose only the default
 * device. */
#define DSP_CAP_OUTPUT 0x00020000
#define DSP_CAP_INPUT 0x00010000
void ALCossListPopulate(al::vector<DevMap> &devlist, int type)
{
    devlist.emplace_back(DevMap{DefaultName, (type==DSP_CAP_INPUT) ? DefaultCapture : DefaultPlayback});
}
#else
/* Append a (name, device node) pair to the given list, skipping duplicate
 * device nodes and disambiguating colliding display names with a " #N"
 * suffix. */
void ALCossListAppend(al::vector<DevMap> &list, al::span<const char> handle, al::span<const char> path)
{
#ifdef ALC_OSS_DEVNODE_TRUC
    /* FreeBSD: when the devnode has a '.' suffix that also appears at the
     * end of the handle, trim it from both (presumably a per-channel
     * suffix on the base node — TODO confirm against FreeBSD naming). */
    for(size_t i{0};i < path.size();++i)
    {
        if(path[i] == '.' && handle.size() + i >= path.size())
        {
            const size_t hoffset{handle.size() + i - path.size()};
            if(strncmp(path.data() + i, handle.data() + hoffset, path.size() - i) == 0)
                handle = handle.first(hoffset);
            path = path.first(i);
        }
    }
#endif
    if(handle.empty())
        handle = path;

    std::string basename{handle.data(), handle.size()};
    std::string devname{path.data(), path.size()};

    /* Skip entries whose device node is already listed. */
    auto match_devname = [&devname](const DevMap &entry) -> bool
    { return entry.device_name == devname; };
    if(std::find_if(list.cbegin(), list.cend(), match_devname) != list.cend())
        return;

    /* Append " #2", " #3", ... until the display name is unique. */
    auto checkName = [&list](const std::string &name) -> bool
    {
        auto match_name = [&name](const DevMap &entry) -> bool { return entry.name == name; };
        return std::find_if(list.cbegin(), list.cend(), match_name) != list.cend();
    };
    int count{1};
    std::string newname{basename};
    while(checkName(newname))
    {
        newname = basename;
        newname += " #";
        newname += std::to_string(++count);
    }
    list.emplace_back(DevMap{std::move(newname), std::move(devname)});
    const DevMap &entry = list.back();

    TRACE("Got device \"%s\", \"%s\"\n", entry.name.c_str(), entry.device_name.c_str());
}
/* Enumerate audio devices of the requested capability (DSP_CAP_INPUT or
 * DSP_CAP_OUTPUT) through the OSSv4 mixer ioctls, then ensure the
 * configured default device sits at the front of the list. */
void ALCossListPopulate(al::vector<DevMap> &devlist, int type_flag)
{
    int fd{open("/dev/mixer", O_RDONLY)};
    if(fd < 0)
    {
        TRACE("Could not open /dev/mixer: %s\n", strerror(errno));
        goto done;
    }

    oss_sysinfo si;
    if(ioctl(fd, SNDCTL_SYSINFO, &si) == -1)
    {
        TRACE("SNDCTL_SYSINFO failed: %s\n", strerror(errno));
        goto done;
    }

    for(int i{0};i < si.numaudios;i++)
    {
        oss_audioinfo ai;
        ai.dev = i;
        if(ioctl(fd, SNDCTL_AUDIOINFO, &ai) == -1)
        {
            ERR("SNDCTL_AUDIOINFO (%d) failed: %s\n", i, strerror(errno));
            continue;
        }
        /* Skip devices lacking the wanted capability or a usable node. */
        if(!(ai.caps&type_flag) || ai.devnode[0] == '\0')
            continue;

        /* Prefer the handle as display name, falling back to the name. */
        al::span<const char> handle;
        if(ai.handle[0] != '\0')
            handle = {ai.handle, strnlen(ai.handle, sizeof(ai.handle))};
        else
            handle = {ai.name, strnlen(ai.name, sizeof(ai.name))};
        al::span<const char> devnode{ai.devnode, strnlen(ai.devnode, sizeof(ai.devnode))};

        ALCossListAppend(devlist, handle, devnode);
    }

done:
    if(fd >= 0)
        close(fd);
    fd = -1;

    /* Move (or insert) the default device node to the front. */
    const char *defdev{((type_flag==DSP_CAP_INPUT) ? DefaultCapture : DefaultPlayback).c_str()};
    auto iter = std::find_if(devlist.cbegin(), devlist.cend(),
        [defdev](const DevMap &entry) -> bool
        { return entry.device_name == defdev; }
    );
    if(iter == devlist.cend())
        devlist.insert(devlist.begin(), DevMap{DefaultName, defdev});
    else
    {
        DevMap entry{std::move(*iter)};
        devlist.erase(iter);
        devlist.insert(devlist.begin(), std::move(entry));
    }
    devlist.shrink_to_fit();
}
#endif
/** Integer base-2 logarithm: position of the highest set bit, i.e.
 * floor(log2(x)) for x >= 1. Returns 0 for inputs of 0 or 1. */
uint log2i(uint x)
{
    uint bits{0u};
    for(;x > 1;x >>= 1)
        ++bits;
    return bits;
}
/* OSS playback backend: a worker thread renders mixed samples into
 * mMixData and writes them to the device fd. */
struct OSSPlayback final : public BackendBase {
    OSSPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~OSSPlayback() override;

    /* Worker thread entry point; renders and writes one update at a time. */
    int mixerProc();

    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;

    int mFd{-1};                      /* Device file descriptor (-1 when closed). */
    al::vector<al::byte> mMixData;    /* One update's worth of rendered audio. */
    std::atomic<bool> mKillNow{true}; /* Signals mixerProc to exit. */
    std::thread mThread;              /* The mixer thread. */

    DEF_NEWDEL(OSSPlayback)
};
OSSPlayback::~OSSPlayback()
{
    /* Make sure the device fd doesn't leak if the backend is destroyed
     * while still open. */
    if(mFd != -1)
        ::close(mFd);
    mFd = -1;
}
/* Mixer thread: repeatedly waits for the device to accept more audio,
 * renders one buffer of samples, and writes it out in full. Runs until
 * stop() sets mKillNow or the device disconnects. */
int OSSPlayback::mixerProc()
{
    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);

    const size_t frame_step{mDevice->channelsFromFmt()};
    const size_t frame_size{mDevice->frameSizeFromFmt()};

    while(!mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        /* Wait (up to 1s) until the fd is writable. */
        pollfd pollitem{};
        pollitem.fd = mFd;
        pollitem.events = POLLOUT;

        int pret{poll(&pollitem, 1, 1000)};
        if(pret < 0)
        {
            /* Interrupted polls are retried; other errors disconnect. */
            if(errno == EINTR || errno == EAGAIN)
                continue;
            ERR("poll failed: %s\n", strerror(errno));
            mDevice->handleDisconnect("Failed waiting for playback buffer: %s", strerror(errno));
            break;
        }
        else if(pret == 0)
        {
            WARN("poll timeout\n");
            continue;
        }

        /* Render a full buffer, then loop until every byte is written
         * (write() may accept only part of it). */
        al::byte *write_ptr{mMixData.data()};
        size_t to_write{mMixData.size()};
        mDevice->renderSamples(write_ptr, static_cast<uint>(to_write/frame_size), frame_step);
        while(to_write > 0 && !mKillNow.load(std::memory_order_acquire))
        {
            ssize_t wrote{write(mFd, write_ptr, to_write)};
            if(wrote < 0)
            {
                if(errno == EAGAIN || errno == EWOULDBLOCK || errno == EINTR)
                    continue;
                ERR("write failed: %s\n", strerror(errno));
                mDevice->handleDisconnect("Failed writing playback samples: %s", strerror(errno));
                break;
            }

            to_write -= static_cast<size_t>(wrote);
            write_ptr += wrote;
        }
    }

    return 0;
}
/* Resolve the requested device name to a /dev node (enumerating on first
 * use) and open it for writing. Throws on unknown name or open failure. */
void OSSPlayback::open(const char *name)
{
    const char *devname{DefaultPlayback.c_str()};
    if(!name)
        name = DefaultName;
    else
    {
        if(PlaybackDevices.empty())
            ALCossListPopulate(PlaybackDevices, DSP_CAP_OUTPUT);

        auto match_name = [name](const DevMap &entry) -> bool
        { return entry.name == name; };
        auto iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(), match_name);
        if(iter == PlaybackDevices.cend())
            throw al::backend_exception{al::backend_error::NoDevice,
                "Device name \"%s\" not found", name};
        devname = iter->device_name.c_str();
    }

    mFd = ::open(devname, O_WRONLY);
    if(mFd == -1)
        throw al::backend_exception{al::backend_error::NoDevice, "Could not open %s: %s", devname,
            strerror(errno)};

    mDevice->DeviceName = name;
}
/* Configure the opened device to (approximately) the requested format,
 * then adopt whatever rate/fragment geometry the driver actually granted.
 * Returns false if the driver can't provide a compatible format. */
bool OSSPlayback::reset()
{
    /* Map the requested sample type to an OSS format, coercing the
     * unsupported types to signed 16-bit. */
    int ossFormat{};
    switch(mDevice->FmtType)
    {
        case DevFmtByte:
            ossFormat = AFMT_S8;
            break;
        case DevFmtUByte:
            ossFormat = AFMT_U8;
            break;
        case DevFmtUShort:
        case DevFmtInt:
        case DevFmtUInt:
        case DevFmtFloat:
            mDevice->FmtType = DevFmtShort;
            /* fall-through */
        case DevFmtShort:
            ossFormat = AFMT_S16_NE;
            break;
    }

    uint periods{mDevice->BufferSize / mDevice->UpdateSize};
    uint numChannels{mDevice->channelsFromFmt()};
    uint ossSpeed{mDevice->Frequency};
    uint frameSize{numChannels * mDevice->bytesFromFmt()};
    /* According to the OSS spec, 16 bytes (log2(16)) is the minimum. */
    uint log2FragmentSize{maxu(log2i(mDevice->UpdateSize*frameSize), 4)};
    /* SNDCTL_DSP_SETFRAGMENT packs fragment count in the high 16 bits and
     * log2(fragment bytes) in the low 16. */
    uint numFragmentsLogSize{(periods << 16) | log2FragmentSize};

    audio_buf_info info{};
    const char *err;
#define CHECKERR(func) if((func) < 0) { \
    err = #func; \
    goto err; \
}
    /* Don't fail if SETFRAGMENT fails. We can handle just about anything
     * that's reported back via GETOSPACE */
    ioctl(mFd, SNDCTL_DSP_SETFRAGMENT, &numFragmentsLogSize);
    CHECKERR(ioctl(mFd, SNDCTL_DSP_SETFMT, &ossFormat));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_CHANNELS, &numChannels));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_SPEED, &ossSpeed));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_GETOSPACE, &info));
    if(0)
    {
    err:
        ERR("%s failed: %s\n", err, strerror(errno));
        return false;
    }
#undef CHECKERR

    /* The ioctls update their arguments in place; verify the driver didn't
     * silently substitute a different channel count or sample format. */
    if(mDevice->channelsFromFmt() != numChannels)
    {
        ERR("Failed to set %s, got %d channels instead\n", DevFmtChannelsString(mDevice->FmtChans),
            numChannels);
        return false;
    }

    if(!((ossFormat == AFMT_S8 && mDevice->FmtType == DevFmtByte) ||
         (ossFormat == AFMT_U8 && mDevice->FmtType == DevFmtUByte) ||
         (ossFormat == AFMT_S16_NE && mDevice->FmtType == DevFmtShort)))
    {
        ERR("Failed to set %s samples, got OSS format %#x\n", DevFmtTypeString(mDevice->FmtType),
            ossFormat);
        return false;
    }

    /* Take the geometry the driver reports back. */
    mDevice->Frequency = ossSpeed;
    mDevice->UpdateSize = static_cast<uint>(info.fragsize) / frameSize;
    mDevice->BufferSize = static_cast<uint>(info.fragments) * mDevice->UpdateSize;

    setDefaultChannelOrder();

    mMixData.resize(mDevice->UpdateSize * mDevice->frameSizeFromFmt());

    return true;
}
void OSSPlayback::start()
{
try {
mKillNow.store(false, std::memory_order_release);
mThread = std::thread{std::mem_fn(&OSSPlayback::mixerProc), this};
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start mixing thread: %s", e.what()};
}
}
/* Signal the mixer thread to exit, join it, and reset the device. A no-op
 * when playback was never started. */
void OSSPlayback::stop()
{
    const bool wasRunning{!mKillNow.exchange(true, std::memory_order_acq_rel)};
    if(!wasRunning || !mThread.joinable())
        return;
    mThread.join();

    if(ioctl(mFd, SNDCTL_DSP_RESET) != 0)
        ERR("Error resetting device: %s\n", strerror(errno));
}
/* OSS capture backend: a worker thread reads samples from the device fd
 * into a ring buffer, which the client drains via captureSamples(). */
struct OSScapture final : public BackendBase {
    OSScapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~OSScapture() override;

    /* Worker thread entry point; polls the fd and fills mRing. */
    int recordProc();

    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;

    int mFd{-1};                      /* Device file descriptor (-1 when closed). */
    RingBufferPtr mRing{nullptr};     /* Captured-sample FIFO between threads. */
    std::atomic<bool> mKillNow{true}; /* Signals recordProc to exit. */
    std::thread mThread;              /* The record thread. */

    DEF_NEWDEL(OSScapture)
};
OSScapture::~OSScapture()
{
    /* Make sure the device fd doesn't leak if the backend is destroyed
     * while still open. */
    if(mFd != -1)
        ::close(mFd);
    mFd = -1;
}
/* Record thread: waits for the device to have readable samples and reads
 * them directly into the ring buffer. Runs until stop() sets mKillNow or
 * the device disconnects. */
int OSScapture::recordProc()
{
    SetRTPriority();
    althrd_setname(RECORD_THREAD_NAME);

    const size_t frame_size{mDevice->frameSizeFromFmt()};
    while(!mKillNow.load(std::memory_order_acquire))
    {
        /* Wait (up to 1s) for the fd to become readable. */
        pollfd pollitem{};
        pollitem.fd = mFd;
        pollitem.events = POLLIN;

        int sret{poll(&pollitem, 1, 1000)};
        if(sret < 0)
        {
            /* Interrupted polls are retried; other errors disconnect. */
            if(errno == EINTR || errno == EAGAIN)
                continue;
            ERR("poll failed: %s\n", strerror(errno));
            mDevice->handleDisconnect("Failed to check capture samples: %s", strerror(errno));
            break;
        }
        else if(sret == 0)
        {
            WARN("poll timeout\n");
            continue;
        }

        /* Read into the ring buffer's first contiguous write segment; any
         * remaining device data is picked up on the next iteration. */
        auto vec = mRing->getWriteVector();
        if(vec.first.len > 0)
        {
            ssize_t amt{read(mFd, vec.first.buf, vec.first.len*frame_size)};
            if(amt < 0)
            {
                ERR("read failed: %s\n", strerror(errno));
                mDevice->handleDisconnect("Failed reading capture samples: %s", strerror(errno));
                break;
            }
            mRing->writeAdvance(static_cast<size_t>(amt)/frame_size);
        }
    }

    return 0;
}
/* Resolve and open the capture device, configure it for the requested
 * format, and allocate the ring buffer. Throws backend_exception on any
 * failure (unknown name, unsupported format, rejected ioctl). */
void OSScapture::open(const char *name)
{
    /* Resolve the requested device name to a /dev node, enumerating the
     * capture list on first use. */
    const char *devname{DefaultCapture.c_str()};
    if(!name)
        name = DefaultName;
    else
    {
        if(CaptureDevices.empty())
            ALCossListPopulate(CaptureDevices, DSP_CAP_INPUT);

        auto iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
            [&name](const DevMap &entry) -> bool
            { return entry.name == name; }
        );
        if(iter == CaptureDevices.cend())
            throw al::backend_exception{al::backend_error::NoDevice,
                "Device name \"%s\" not found", name};
        devname = iter->device_name.c_str();
    }

    mFd = ::open(devname, O_RDONLY);
    if(mFd == -1)
        throw al::backend_exception{al::backend_error::NoDevice, "Could not open %s: %s", devname,
            strerror(errno)};

    /* Only 8-bit (signed/unsigned) and native-endian signed 16-bit capture
     * formats are supported. */
    int ossFormat{};
    switch(mDevice->FmtType)
    {
        case DevFmtByte:
            ossFormat = AFMT_S8;
            break;
        case DevFmtUByte:
            ossFormat = AFMT_U8;
            break;
        case DevFmtShort:
            ossFormat = AFMT_S16_NE;
            break;
        case DevFmtUShort:
        case DevFmtInt:
        case DevFmtUInt:
        case DevFmtFloat:
            throw al::backend_exception{al::backend_error::DeviceError,
                "%s capture samples not supported", DevFmtTypeString(mDevice->FmtType)};
    }

    uint periods{4};
    uint numChannels{mDevice->channelsFromFmt()};
    uint frameSize{numChannels * mDevice->bytesFromFmt()};
    uint ossSpeed{mDevice->Frequency};
    /* according to the OSS spec, 16 bytes are the minimum */
    uint log2FragmentSize{maxu(log2i(mDevice->BufferSize * frameSize / periods), 4)};
    /* Fragment count in the high 16 bits, log2(fragment bytes) in the low. */
    uint numFragmentsLogSize{(periods << 16) | log2FragmentSize};

    audio_buf_info info{};
#define CHECKERR(func) if((func) < 0) { \
    throw al::backend_exception{al::backend_error::DeviceError, #func " failed: %s", \
        strerror(errno)}; \
}
    CHECKERR(ioctl(mFd, SNDCTL_DSP_SETFRAGMENT, &numFragmentsLogSize));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_SETFMT, &ossFormat));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_CHANNELS, &numChannels));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_SPEED, &ossSpeed));
    CHECKERR(ioctl(mFd, SNDCTL_DSP_GETISPACE, &info));
#undef CHECKERR

    /* The ioctls update their arguments in place; fail if the driver
     * substituted a different channel count or sample format. */
    if(mDevice->channelsFromFmt() != numChannels)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to set %s, got %d channels instead", DevFmtChannelsString(mDevice->FmtChans),
            numChannels};

    if(!((ossFormat == AFMT_S8 && mDevice->FmtType == DevFmtByte)
        || (ossFormat == AFMT_U8 && mDevice->FmtType == DevFmtUByte)
        || (ossFormat == AFMT_S16_NE && mDevice->FmtType == DevFmtShort)))
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to set %s samples, got OSS format %#x", DevFmtTypeString(mDevice->FmtType),
            ossFormat};

    mRing = RingBuffer::Create(mDevice->BufferSize, frameSize, false);

    mDevice->DeviceName = name;
}
void OSScapture::start()
{
try {
mKillNow.store(false, std::memory_order_release);
mThread = std::thread{std::mem_fn(&OSScapture::recordProc), this};
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start recording thread: %s", e.what()};
}
}
/* Signal the record thread to exit, join it, and reset the device. A
 * no-op when capture was never started. */
void OSScapture::stop()
{
    const bool wasRunning{!mKillNow.exchange(true, std::memory_order_acq_rel)};
    if(!wasRunning || !mThread.joinable())
        return;
    mThread.join();

    if(ioctl(mFd, SNDCTL_DSP_RESET) != 0)
        ERR("Error resetting device: %s\n", strerror(errno));
}
/* Hand out frames previously queued by the record thread. */
void OSScapture::captureSamples(al::byte *buffer, uint samples)
{
    mRing->read(buffer, samples);
}
/* Frames buffered by the record thread and not yet consumed. */
uint OSScapture::availableSamples()
{
    return static_cast<uint>(mRing->readSpace());
}
} // namespace
/* Return the process-wide OSS factory (constructed on first use). */
BackendFactory &OSSBackendFactory::getFactory()
{
    static OSSBackendFactory singleton;
    return singleton;
}
/* Let the config file override the default playback/capture device nodes. */
bool OSSBackendFactory::init()
{
    if(auto capopt = ConfigValueStr(nullptr, "oss", "capture"))
        DefaultCapture = std::move(*capopt);
    if(auto devopt = ConfigValueStr(nullptr, "oss", "device"))
        DefaultPlayback = std::move(*devopt);
    return true;
}
/* OSS implements both playback and capture backends. */
bool OSSBackendFactory::querySupport(BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
    case BackendType::Capture:
        return true;
    default:
        break;
    }
    return false;
}
/* Re-enumerate devices of the requested kind and return their display
 * names as a null-separated (double-null-terminated) list, skipping
 * entries whose device node no longer exists. */
std::string OSSBackendFactory::probe(BackendType type)
{
    std::string outnames;
    auto append_entry = [&outnames](const DevMap &entry) -> void
    {
        struct stat buf;
        if(stat(entry.device_name.c_str(), &buf) != 0)
            return;
        /* Includes null char. */
        outnames.append(entry.name.c_str(), entry.name.length()+1);
    };

    switch(type)
    {
    case BackendType::Playback:
        PlaybackDevices.clear();
        ALCossListPopulate(PlaybackDevices, DSP_CAP_OUTPUT);
        for(const DevMap &entry : PlaybackDevices)
            append_entry(entry);
        break;

    case BackendType::Capture:
        CaptureDevices.clear();
        ALCossListPopulate(CaptureDevices, DSP_CAP_INPUT);
        for(const DevMap &entry : CaptureDevices)
            append_entry(entry);
        break;
    }

    return outnames;
}
/* Instantiate the requested backend type; nullptr for unsupported types. */
BackendPtr OSSBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
        return BackendPtr{new OSSPlayback{device}};
    case BackendType::Capture:
        return BackendPtr{new OSScapture{device}};
    default:
        break;
    }
    return nullptr;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_OSS_H
#define BACKENDS_OSS_H

#include "backends/base.h"

/* Factory for the OSS (Open Sound System) playback and capture backends. */
struct OSSBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Process-wide singleton accessor. */
    static BackendFactory &getFactory();
};

#endif /* BACKENDS_OSS_H */

View file

@ -1,558 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "alMain.h"
#include "alu.h"
#include "alconfig.h"
#include "ringbuffer.h"
#include "compat.h"
#include "backends/base.h"
#include <portaudio.h>
static const ALCchar pa_device[] = "PortAudio Default";
#ifdef HAVE_DYNLOAD
static void *pa_handle;
#define MAKE_FUNC(x) static __typeof(x) * p##x
MAKE_FUNC(Pa_Initialize);
MAKE_FUNC(Pa_Terminate);
MAKE_FUNC(Pa_GetErrorText);
MAKE_FUNC(Pa_StartStream);
MAKE_FUNC(Pa_StopStream);
MAKE_FUNC(Pa_OpenStream);
MAKE_FUNC(Pa_CloseStream);
MAKE_FUNC(Pa_GetDefaultOutputDevice);
MAKE_FUNC(Pa_GetDefaultInputDevice);
MAKE_FUNC(Pa_GetStreamInfo);
#undef MAKE_FUNC
#define Pa_Initialize pPa_Initialize
#define Pa_Terminate pPa_Terminate
#define Pa_GetErrorText pPa_GetErrorText
#define Pa_StartStream pPa_StartStream
#define Pa_StopStream pPa_StopStream
#define Pa_OpenStream pPa_OpenStream
#define Pa_CloseStream pPa_CloseStream
#define Pa_GetDefaultOutputDevice pPa_GetDefaultOutputDevice
#define Pa_GetDefaultInputDevice pPa_GetDefaultInputDevice
#define Pa_GetStreamInfo pPa_GetStreamInfo
#endif
/* Load the PortAudio library (at runtime when HAVE_DYNLOAD) and call
 * Pa_Initialize once. Returns ALC_FALSE if the library or any required
 * symbol is unavailable, or if initialization fails. */
static ALCboolean pa_load(void)
{
    PaError err;

#ifdef HAVE_DYNLOAD
    if(!pa_handle)
    {
        /* Platform-specific shared library name. */
#ifdef _WIN32
# define PALIB "portaudio.dll"
#elif defined(__APPLE__) && defined(__MACH__)
# define PALIB "libportaudio.2.dylib"
#elif defined(__OpenBSD__)
# define PALIB "libportaudio.so"
#else
# define PALIB "libportaudio.so.2"
#endif

        pa_handle = LoadLib(PALIB);
        if(!pa_handle)
            return ALC_FALSE;

        /* Resolve each entry point; any missing symbol unloads the lib. */
#define LOAD_FUNC(f) do { \
    p##f = GetSymbol(pa_handle, #f); \
    if(p##f == NULL) \
    { \
        CloseLib(pa_handle); \
        pa_handle = NULL; \
        return ALC_FALSE; \
    } \
} while(0)
        LOAD_FUNC(Pa_Initialize);
        LOAD_FUNC(Pa_Terminate);
        LOAD_FUNC(Pa_GetErrorText);
        LOAD_FUNC(Pa_StartStream);
        LOAD_FUNC(Pa_StopStream);
        LOAD_FUNC(Pa_OpenStream);
        LOAD_FUNC(Pa_CloseStream);
        LOAD_FUNC(Pa_GetDefaultOutputDevice);
        LOAD_FUNC(Pa_GetDefaultInputDevice);
        LOAD_FUNC(Pa_GetStreamInfo);
#undef LOAD_FUNC

        if((err=Pa_Initialize()) != paNoError)
        {
            ERR("Pa_Initialize() returned an error: %s\n", Pa_GetErrorText(err));
            CloseLib(pa_handle);
            pa_handle = NULL;
            return ALC_FALSE;
        }
    }
#else
    if((err=Pa_Initialize()) != paNoError)
    {
        ERR("Pa_Initialize() returned an error: %s\n", Pa_GetErrorText(err));
        return ALC_FALSE;
    }
#endif
    return ALC_TRUE;
}
/* PortAudio playback backend instance data. */
typedef struct ALCportPlayback {
    DERIVE_FROM_TYPE(ALCbackend);

    PaStream *stream;          /* Open PortAudio output stream. */
    PaStreamParameters params; /* Parameters the stream was opened with. */
    ALuint update_size;        /* Update size saved from open(), restored in reset(). */
} ALCportPlayback;

/* Method declarations and vtable boilerplate for the playback backend. */
static int ALCportPlayback_WriteCallback(const void *inputBuffer, void *outputBuffer,
    unsigned long framesPerBuffer, const PaStreamCallbackTimeInfo *timeInfo,
    const PaStreamCallbackFlags statusFlags, void *userData);

static void ALCportPlayback_Construct(ALCportPlayback *self, ALCdevice *device);
static void ALCportPlayback_Destruct(ALCportPlayback *self);
static ALCenum ALCportPlayback_open(ALCportPlayback *self, const ALCchar *name);
static ALCboolean ALCportPlayback_reset(ALCportPlayback *self);
static ALCboolean ALCportPlayback_start(ALCportPlayback *self);
static void ALCportPlayback_stop(ALCportPlayback *self);
static DECLARE_FORWARD2(ALCportPlayback, ALCbackend, ALCenum, captureSamples, ALCvoid*, ALCuint)
static DECLARE_FORWARD(ALCportPlayback, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCportPlayback, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCportPlayback, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCportPlayback, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCportPlayback)

DEFINE_ALCBACKEND_VTABLE(ALCportPlayback);
/* Initialize the base backend, install this type's vtable, and clear the
 * stream handle (the stream is opened later in ALCportPlayback_open). */
static void ALCportPlayback_Construct(ALCportPlayback *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCportPlayback, ALCbackend, self);

    self->stream = NULL;
}
/* Close the PortAudio stream (if any) before tearing down the base. */
static void ALCportPlayback_Destruct(ALCportPlayback *self)
{
    PaError err = self->stream ? Pa_CloseStream(self->stream) : paNoError;
    if(err != paNoError)
        ERR("Error closing stream: %s\n", Pa_GetErrorText(err));
    self->stream = NULL;

    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* PortAudio playback callback: mixes framesPerBuffer frames directly into
 * PortAudio's output buffer, holding the backend lock while mixing. */
static int ALCportPlayback_WriteCallback(const void *UNUSED(inputBuffer), void *outputBuffer,
    unsigned long framesPerBuffer, const PaStreamCallbackTimeInfo *UNUSED(timeInfo),
    const PaStreamCallbackFlags UNUSED(statusFlags), void *userData)
{
    ALCportPlayback *self = userData;

    ALCportPlayback_lock(self);
    aluMixData(STATIC_CAST(ALCbackend, self)->mDevice, outputBuffer, framesPerBuffer);
    ALCportPlayback_unlock(self);
    return 0;
}
/* Open a PortAudio output stream matching the ALC device format. The
 * stream's device index can be overridden via the "port/device" config
 * option; otherwise the default output device is used. */
static ALCenum ALCportPlayback_open(ALCportPlayback *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    PaError err;

    /* Only the one virtual device name is supported. */
    if(!name)
        name = pa_device;
    else if(strcmp(name, pa_device) != 0)
        return ALC_INVALID_VALUE;

    self->update_size = device->UpdateSize;

    self->params.device = -1;
    if(!ConfigValueInt(NULL, "port", "device", &self->params.device) ||
       self->params.device < 0)
        self->params.device = Pa_GetDefaultOutputDevice();
    /* Suggest a latency covering the full buffer (all updates). */
    self->params.suggestedLatency = (device->UpdateSize*device->NumUpdates) /
                                    (float)device->Frequency;
    self->params.hostApiSpecificStreamInfo = NULL;

    self->params.channelCount = ((device->FmtChans == DevFmtMono) ? 1 : 2);

    /* Unsigned 16/32-bit types map to their signed PortAudio equivalents. */
    switch(device->FmtType)
    {
        case DevFmtByte:
            self->params.sampleFormat = paInt8;
            break;
        case DevFmtUByte:
            self->params.sampleFormat = paUInt8;
            break;
        case DevFmtUShort:
            /* fall-through */
        case DevFmtShort:
            self->params.sampleFormat = paInt16;
            break;
        case DevFmtUInt:
            /* fall-through */
        case DevFmtInt:
            self->params.sampleFormat = paInt32;
            break;
        case DevFmtFloat:
            self->params.sampleFormat = paFloat32;
            break;
    }

retry_open:
    err = Pa_OpenStream(&self->stream, NULL, &self->params,
        device->Frequency, device->UpdateSize, paNoFlag,
        ALCportPlayback_WriteCallback, self
    );
    if(err != paNoError)
    {
        /* If float output was rejected, retry exactly once with 16-bit
         * samples before giving up. */
        if(self->params.sampleFormat == paFloat32)
        {
            self->params.sampleFormat = paInt16;
            goto retry_open;
        }
        ERR("Pa_OpenStream() returned an error: %s\n", Pa_GetErrorText(err));
        return ALC_INVALID_VALUE;
    }

    alstr_copy_cstr(&device->DeviceName, name);

    return ALC_NO_ERROR;
}
/* Sync the ALC device format with what the opened PortAudio stream
 * actually provides (rate, sample format, channel count). */
static ALCboolean ALCportPlayback_reset(ALCportPlayback *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    const PaStreamInfo *streamInfo;

    streamInfo = Pa_GetStreamInfo(self->stream);
    device->Frequency = streamInfo->sampleRate;
    device->UpdateSize = self->update_size;

    /* Translate the negotiated PortAudio sample format back to a DevFmt. */
    if(self->params.sampleFormat == paInt8)
        device->FmtType = DevFmtByte;
    else if(self->params.sampleFormat == paUInt8)
        device->FmtType = DevFmtUByte;
    else if(self->params.sampleFormat == paInt16)
        device->FmtType = DevFmtShort;
    else if(self->params.sampleFormat == paInt32)
        device->FmtType = DevFmtInt;
    else if(self->params.sampleFormat == paFloat32)
        device->FmtType = DevFmtFloat;
    else
    {
        ERR("Unexpected sample format: 0x%lx\n", self->params.sampleFormat);
        return ALC_FALSE;
    }

    if(self->params.channelCount == 2)
        device->FmtChans = DevFmtStereo;
    else if(self->params.channelCount == 1)
        device->FmtChans = DevFmtMono;
    else
    {
        ERR("Unexpected channel count: %u\n", self->params.channelCount);
        return ALC_FALSE;
    }
    SetDefaultChannelOrder(device);

    return ALC_TRUE;
}
/* Begin stream playback; PortAudio drives mixing via the write callback. */
static ALCboolean ALCportPlayback_start(ALCportPlayback *self)
{
    const PaError err = Pa_StartStream(self->stream);
    if(err == paNoError)
        return ALC_TRUE;
    ERR("Pa_StartStream() returned an error: %s\n", Pa_GetErrorText(err));
    return ALC_FALSE;
}
/* Halt the stream; failures are logged but otherwise ignored. */
static void ALCportPlayback_stop(ALCportPlayback *self)
{
    const PaError err = Pa_StopStream(self->stream);
    if(err != paNoError)
        ERR("Error stopping stream: %s\n", Pa_GetErrorText(err));
}
/* PortAudio capture backend instance data. */
typedef struct ALCportCapture {
    DERIVE_FROM_TYPE(ALCbackend);

    PaStream *stream;          /* Open PortAudio input stream. */
    PaStreamParameters params; /* Parameters the stream was opened with. */

    ll_ringbuffer_t *ring;     /* FIFO between the PA callback and the client. */
} ALCportCapture;

/* Method declarations and vtable boilerplate for the capture backend. */
static int ALCportCapture_ReadCallback(const void *inputBuffer, void *outputBuffer,
    unsigned long framesPerBuffer, const PaStreamCallbackTimeInfo *timeInfo,
    const PaStreamCallbackFlags statusFlags, void *userData);

static void ALCportCapture_Construct(ALCportCapture *self, ALCdevice *device);
static void ALCportCapture_Destruct(ALCportCapture *self);
static ALCenum ALCportCapture_open(ALCportCapture *self, const ALCchar *name);
static DECLARE_FORWARD(ALCportCapture, ALCbackend, ALCboolean, reset)
static ALCboolean ALCportCapture_start(ALCportCapture *self);
static void ALCportCapture_stop(ALCportCapture *self);
static ALCenum ALCportCapture_captureSamples(ALCportCapture *self, ALCvoid *buffer, ALCuint samples);
static ALCuint ALCportCapture_availableSamples(ALCportCapture *self);
static DECLARE_FORWARD(ALCportCapture, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCportCapture, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCportCapture, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCportCapture)

DEFINE_ALCBACKEND_VTABLE(ALCportCapture);
/* Initialize the base backend, install the vtable, and clear handles
 * (stream and ring buffer are created later in ALCportCapture_open). */
static void ALCportCapture_Construct(ALCportCapture *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCportCapture, ALCbackend, self);

    self->stream = NULL;
    self->ring = NULL;
}
/* Close the stream and free the ring buffer before base destruction. */
static void ALCportCapture_Destruct(ALCportCapture *self)
{
    PaError err = self->stream ? Pa_CloseStream(self->stream) : paNoError;
    if(err != paNoError)
        ERR("Error closing stream: %s\n", Pa_GetErrorText(err));
    self->stream = NULL;

    ll_ringbuffer_free(self->ring);
    self->ring = NULL;

    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* PortAudio capture callback: stash incoming frames in the ring buffer,
 * dropping whatever doesn't fit rather than blocking the audio thread. */
static int ALCportCapture_ReadCallback(const void *inputBuffer, void *UNUSED(outputBuffer),
    unsigned long framesPerBuffer, const PaStreamCallbackTimeInfo *UNUSED(timeInfo),
    const PaStreamCallbackFlags UNUSED(statusFlags), void *userData)
{
    ALCportCapture *self = userData;
    size_t writable = ll_ringbuffer_write_space(self->ring);

    if(writable < framesPerBuffer)
        framesPerBuffer = writable;
    ll_ringbuffer_write(self->ring, inputBuffer, framesPerBuffer);
    return 0;
}
/* Open the capture device. Only the single fixed device name (pa_device) is
 * accepted. Allocates a ring buffer sized for the device's update config
 * (at least 100ms), resolves the PortAudio input device from the "port/capture"
 * config option (falling back to the default input device), maps the device
 * sample format to a PortAudio format, and opens an input-only stream. */
static ALCenum ALCportCapture_open(ALCportCapture *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    ALuint samples, frame_size;
    PaError err;
    if(!name)
        name = pa_device;
    else if(strcmp(name, pa_device) != 0)
        return ALC_INVALID_VALUE;
    /* Buffer at least 100ms of audio so the app has slack to drain it. */
    samples = device->UpdateSize * device->NumUpdates;
    samples = maxu(samples, 100 * device->Frequency / 1000);
    frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
    self->ring = ll_ringbuffer_create(samples, frame_size, false);
    if(self->ring == NULL) return ALC_INVALID_VALUE;
    self->params.device = -1;
    if(!ConfigValueInt(NULL, "port", "capture", &self->params.device) ||
        self->params.device < 0)
        self->params.device = Pa_GetDefaultInputDevice();
    self->params.suggestedLatency = 0.0f;
    self->params.hostApiSpecificStreamInfo = NULL;
    switch(device->FmtType)
    {
        case DevFmtByte:
            self->params.sampleFormat = paInt8;
            break;
        case DevFmtUByte:
            self->params.sampleFormat = paUInt8;
            break;
        case DevFmtShort:
            self->params.sampleFormat = paInt16;
            break;
        case DevFmtInt:
            self->params.sampleFormat = paInt32;
            break;
        case DevFmtFloat:
            self->params.sampleFormat = paFloat32;
            break;
        /* PortAudio has no unsigned 16/32-bit formats; reject them. */
        case DevFmtUInt:
        case DevFmtUShort:
            ERR("%s samples not supported\n", DevFmtTypeString(device->FmtType));
            return ALC_INVALID_VALUE;
    }
    self->params.channelCount = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
    err = Pa_OpenStream(&self->stream, &self->params, NULL,
        device->Frequency, paFramesPerBufferUnspecified, paNoFlag,
        ALCportCapture_ReadCallback, self
    );
    if(err != paNoError)
    {
        ERR("Pa_OpenStream() returned an error: %s\n", Pa_GetErrorText(err));
        return ALC_INVALID_VALUE;
    }
    alstr_copy_cstr(&device->DeviceName, name);
    return ALC_NO_ERROR;
}
/* Begin capturing; PortAudio then invokes ALCportCapture_ReadCallback from
 * its own thread. Returns ALC_TRUE on success, ALC_FALSE otherwise. */
static ALCboolean ALCportCapture_start(ALCportCapture *self)
{
    const PaError status = Pa_StartStream(self->stream);
    if(status == paNoError)
        return ALC_TRUE;
    ERR("Error starting stream: %s\n", Pa_GetErrorText(status));
    return ALC_FALSE;
}
/* Halt capturing; a failure is only logged since there is nothing to undo. */
static void ALCportCapture_stop(ALCportCapture *self)
{
    const PaError status = Pa_StopStream(self->stream);
    if(status == paNoError)
        return;
    ERR("Error stopping stream: %s\n", Pa_GetErrorText(status));
}
/* Number of captured frames waiting in the ring buffer. */
static ALCuint ALCportCapture_availableSamples(ALCportCapture *self)
{
    return ll_ringbuffer_read_space(self->ring);
}
/* Copy up to `samples` frames out of the ring buffer into `buffer`. The
 * caller is expected to request no more than availableSamples() frames. */
static ALCenum ALCportCapture_captureSamples(ALCportCapture *self, ALCvoid *buffer, ALCuint samples)
{
    ll_ringbuffer_read(self->ring, buffer, samples);
    return ALC_NO_ERROR;
}
/* Factory object exposing the PortAudio backend to the backend registry. */
typedef struct ALCportBackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCportBackendFactory;
#define ALCPORTBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCportBackendFactory, ALCbackendFactory) } }
static ALCboolean ALCportBackendFactory_init(ALCportBackendFactory *self);
static void ALCportBackendFactory_deinit(ALCportBackendFactory *self);
static ALCboolean ALCportBackendFactory_querySupport(ALCportBackendFactory *self, ALCbackend_Type type);
static void ALCportBackendFactory_probe(ALCportBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCportBackendFactory_createBackend(ALCportBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCportBackendFactory);
/* Load (and initialize) the PortAudio library; the factory is only usable
 * when pa_load() succeeds. */
static ALCboolean ALCportBackendFactory_init(ALCportBackendFactory* UNUSED(self))
{
    if(!pa_load())
        return ALC_FALSE;
    return ALC_TRUE;
}
/* Shut PortAudio down. With dynamic loading, also unload the library; the
 * pa_handle check makes the call safe if init never succeeded. */
static void ALCportBackendFactory_deinit(ALCportBackendFactory* UNUSED(self))
{
#ifdef HAVE_DYNLOAD
    if(pa_handle)
    {
        Pa_Terminate();
        CloseLib(pa_handle);
        pa_handle = NULL;
    }
#else
    Pa_Terminate();
#endif
}
/* PortAudio supports both playback and capture streams. */
static ALCboolean ALCportBackendFactory_querySupport(ALCportBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    if(type == ALCbackend_Playback || type == ALCbackend_Capture)
        return ALC_TRUE;
    return ALC_FALSE;
}
/* Advertise the single fixed device name for both probe types. */
static void ALCportBackendFactory_probe(ALCportBackendFactory* UNUSED(self), enum DevProbe type)
{
    switch(type)
    {
        case ALL_DEVICE_PROBE:
            AppendAllDevicesList(pa_device);
            break;
        case CAPTURE_DEVICE_PROBE:
            AppendCaptureDeviceList(pa_device);
            break;
    }
}
/* Construct the playback or capture backend object for the given device;
 * returns NULL for unsupported types or allocation failure. */
static ALCbackend* ALCportBackendFactory_createBackend(ALCportBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
    {
        ALCportPlayback *backend;
        NEW_OBJ(backend, ALCportPlayback)(device);
        if(!backend) return NULL;
        return STATIC_CAST(ALCbackend, backend);
    }
    if(type == ALCbackend_Capture)
    {
        ALCportCapture *backend;
        NEW_OBJ(backend, ALCportCapture)(device);
        if(!backend) return NULL;
        return STATIC_CAST(ALCbackend, backend);
    }
    return NULL;
}
/* Singleton accessor used by the backend registry. */
ALCbackendFactory *ALCportBackendFactory_getFactory(void)
{
    static ALCportBackendFactory factory = ALCPORTBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &factory);
}

View file

@ -0,0 +1,442 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/portaudio.h"
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include "alcmain.h"
#include "alu.h"
#include "alconfig.h"
#include "core/logging.h"
#include "dynload.h"
#include "ringbuffer.h"
#include <portaudio.h>
namespace {
/* The one device name this backend exposes. */
constexpr char pa_device[] = "PortAudio Default";
#ifdef HAVE_DYNLOAD
/* Handle for the dynamically-loaded PortAudio library, plus one function
 * pointer per API entry point. The #defines below redirect the normal
 * Pa_* names to these pointers (hidden from IDE parsers). */
void *pa_handle;
#define MAKE_FUNC(x) decltype(x) * p##x
MAKE_FUNC(Pa_Initialize);
MAKE_FUNC(Pa_Terminate);
MAKE_FUNC(Pa_GetErrorText);
MAKE_FUNC(Pa_StartStream);
MAKE_FUNC(Pa_StopStream);
MAKE_FUNC(Pa_OpenStream);
MAKE_FUNC(Pa_CloseStream);
MAKE_FUNC(Pa_GetDefaultOutputDevice);
MAKE_FUNC(Pa_GetDefaultInputDevice);
MAKE_FUNC(Pa_GetStreamInfo);
#undef MAKE_FUNC
#ifndef IN_IDE_PARSER
#define Pa_Initialize pPa_Initialize
#define Pa_Terminate pPa_Terminate
#define Pa_GetErrorText pPa_GetErrorText
#define Pa_StartStream pPa_StartStream
#define Pa_StopStream pPa_StopStream
#define Pa_OpenStream pPa_OpenStream
#define Pa_CloseStream pPa_CloseStream
#define Pa_GetDefaultOutputDevice pPa_GetDefaultOutputDevice
#define Pa_GetDefaultInputDevice pPa_GetDefaultInputDevice
#define Pa_GetStreamInfo pPa_GetStreamInfo
#endif
#endif
/* Playback backend backed by a PortAudio output stream. writeCallbackC is
 * the C-style trampoline PortAudio calls; it forwards to the member. */
struct PortPlayback final : public BackendBase {
    PortPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~PortPlayback() override;
    int writeCallback(const void *inputBuffer, void *outputBuffer, unsigned long framesPerBuffer,
        const PaStreamCallbackTimeInfo *timeInfo, const PaStreamCallbackFlags statusFlags) noexcept;
    static int writeCallbackC(const void *inputBuffer, void *outputBuffer,
        unsigned long framesPerBuffer, const PaStreamCallbackTimeInfo *timeInfo,
        const PaStreamCallbackFlags statusFlags, void *userData) noexcept
    {
        return static_cast<PortPlayback*>(userData)->writeCallback(inputBuffer, outputBuffer,
            framesPerBuffer, timeInfo, statusFlags);
    }
    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
    PaStream *mStream{nullptr};
    PaStreamParameters mParams{};
    /* Update size requested at open time; restored by reset(). */
    uint mUpdateSize{0u};
    DEF_NEWDEL(PortPlayback)
};
/* Close the stream if one was opened; failure is logged, not fatal. */
PortPlayback::~PortPlayback()
{
    PaError err{mStream ? Pa_CloseStream(mStream) : paNoError};
    if(err != paNoError)
        ERR("Error closing stream: %s\n", Pa_GetErrorText(err));
    mStream = nullptr;
}
/* PortAudio output callback (PortAudio's thread): render the next block of
 * mixed samples directly into the output buffer. Returns 0 (paContinue). */
int PortPlayback::writeCallback(const void*, void *outputBuffer, unsigned long framesPerBuffer,
    const PaStreamCallbackTimeInfo*, const PaStreamCallbackFlags) noexcept
{
    mDevice->renderSamples(outputBuffer, static_cast<uint>(framesPerBuffer),
        static_cast<uint>(mParams.channelCount));
    return 0;
}
/* Open the playback device. Only the fixed pa_device name is accepted.
 * Resolves the output device index from the "port/device" config option
 * (default output device otherwise), maps the ALC sample format to a
 * PortAudio format, and opens the stream — retrying once with 16-bit int
 * if float output was rejected. Throws backend_exception on failure. */
void PortPlayback::open(const char *name)
{
    if(!name)
        name = pa_device;
    else if(strcmp(name, pa_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};
    mUpdateSize = mDevice->UpdateSize;
    auto devidopt = ConfigValueInt(nullptr, "port", "device");
    if(devidopt && *devidopt >= 0) mParams.device = *devidopt;
    else mParams.device = Pa_GetDefaultOutputDevice();
    mParams.suggestedLatency = mDevice->BufferSize / static_cast<double>(mDevice->Frequency);
    mParams.hostApiSpecificStreamInfo = nullptr;
    mParams.channelCount = ((mDevice->FmtChans == DevFmtMono) ? 1 : 2);
    switch(mDevice->FmtType)
    {
    case DevFmtByte:
        mParams.sampleFormat = paInt8;
        break;
    case DevFmtUByte:
        mParams.sampleFormat = paUInt8;
        break;
    /* PortAudio has no unsigned 16/32-bit formats; use the signed ones. */
    case DevFmtUShort:
        /* fall-through */
    case DevFmtShort:
        mParams.sampleFormat = paInt16;
        break;
    case DevFmtUInt:
        /* fall-through */
    case DevFmtInt:
        mParams.sampleFormat = paInt32;
        break;
    case DevFmtFloat:
        mParams.sampleFormat = paFloat32;
        break;
    }
retry_open:
    PaError err{Pa_OpenStream(&mStream, nullptr, &mParams, mDevice->Frequency, mDevice->UpdateSize,
        paNoFlag, &PortPlayback::writeCallbackC, this)};
    if(err != paNoError)
    {
        /* Some hosts reject float output; retry once with 16-bit int. */
        if(mParams.sampleFormat == paFloat32)
        {
            mParams.sampleFormat = paInt16;
            goto retry_open;
        }
        throw al::backend_exception{al::backend_error::NoDevice, "Failed to open stream: %s",
            Pa_GetErrorText(err)};
    }
    mDevice->DeviceName = name;
}
/* Push the format PortAudio actually gave us back into the device: query the
 * stream's real sample rate, restore the requested update size, and translate
 * the negotiated PortAudio sample format/channel count back to device enums.
 * Returns false if the stream ended up with a format we can't represent. */
bool PortPlayback::reset()
{
    const PaStreamInfo *streamInfo{Pa_GetStreamInfo(mStream)};
    mDevice->Frequency = static_cast<uint>(streamInfo->sampleRate);
    mDevice->UpdateSize = mUpdateSize;
    if(mParams.sampleFormat == paInt8)
        mDevice->FmtType = DevFmtByte;
    else if(mParams.sampleFormat == paUInt8)
        mDevice->FmtType = DevFmtUByte;
    else if(mParams.sampleFormat == paInt16)
        mDevice->FmtType = DevFmtShort;
    else if(mParams.sampleFormat == paInt32)
        mDevice->FmtType = DevFmtInt;
    else if(mParams.sampleFormat == paFloat32)
        mDevice->FmtType = DevFmtFloat;
    else
    {
        ERR("Unexpected sample format: 0x%lx\n", mParams.sampleFormat);
        return false;
    }
    if(mParams.channelCount == 2)
        mDevice->FmtChans = DevFmtStereo;
    else if(mParams.channelCount == 1)
        mDevice->FmtChans = DevFmtMono;
    else
    {
        ERR("Unexpected channel count: %u\n", mParams.channelCount);
        return false;
    }
    setDefaultChannelOrder();
    return true;
}
/* Begin asynchronous playback; PortAudio then invokes writeCallbackC from
 * its own thread. Throws backend_exception if the stream fails to start.
 *
 * BUG FIX: the error test was inverted (`err == paNoError`), which threw on
 * every successful start and silently ignored real failures. It now matches
 * PortCapture::start, throwing only when Pa_StartStream reports an error. */
void PortPlayback::start()
{
    const PaError err{Pa_StartStream(mStream)};
    if(err != paNoError)
        throw al::backend_exception{al::backend_error::DeviceError, "Failed to start playback: %s",
            Pa_GetErrorText(err)};
}
void PortPlayback::stop()
{
PaError err{Pa_StopStream(mStream)};
if(err != paNoError)
ERR("Error stopping stream: %s\n", Pa_GetErrorText(err));
}
/* Capture backend backed by a PortAudio input stream; captured frames are
 * buffered in mRing. readCallbackC is the C trampoline PortAudio calls. */
struct PortCapture final : public BackendBase {
    PortCapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~PortCapture() override;
    int readCallback(const void *inputBuffer, void *outputBuffer, unsigned long framesPerBuffer,
        const PaStreamCallbackTimeInfo *timeInfo, const PaStreamCallbackFlags statusFlags) noexcept;
    static int readCallbackC(const void *inputBuffer, void *outputBuffer,
        unsigned long framesPerBuffer, const PaStreamCallbackTimeInfo *timeInfo,
        const PaStreamCallbackFlags statusFlags, void *userData) noexcept
    {
        return static_cast<PortCapture*>(userData)->readCallback(inputBuffer, outputBuffer,
            framesPerBuffer, timeInfo, statusFlags);
    }
    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;
    PaStream *mStream{nullptr};
    PaStreamParameters mParams;
    RingBufferPtr mRing{nullptr};
    DEF_NEWDEL(PortCapture)
};
/* Close the stream if one was opened; failure is logged, not fatal. */
PortCapture::~PortCapture()
{
    PaError err{mStream ? Pa_CloseStream(mStream) : paNoError};
    if(err != paNoError)
        ERR("Error closing stream: %s\n", Pa_GetErrorText(err));
    mStream = nullptr;
}
/* PortAudio input callback (PortAudio's thread): push the captured frames
 * into the ring buffer. Returns 0 (paContinue). */
int PortCapture::readCallback(const void *inputBuffer, void*, unsigned long framesPerBuffer,
    const PaStreamCallbackTimeInfo*, const PaStreamCallbackFlags) noexcept
{
    mRing->write(inputBuffer, framesPerBuffer);
    return 0;
}
void PortCapture::open(const char *name)
{
if(!name)
name = pa_device;
else if(strcmp(name, pa_device) != 0)
throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
name};
uint samples{mDevice->BufferSize};
samples = maxu(samples, 100 * mDevice->Frequency / 1000);
uint frame_size{mDevice->frameSizeFromFmt()};
mRing = RingBuffer::Create(samples, frame_size, false);
auto devidopt = ConfigValueInt(nullptr, "port", "capture");
if(devidopt && *devidopt >= 0) mParams.device = *devidopt;
else mParams.device = Pa_GetDefaultOutputDevice();
mParams.suggestedLatency = 0.0f;
mParams.hostApiSpecificStreamInfo = nullptr;
switch(mDevice->FmtType)
{
case DevFmtByte:
mParams.sampleFormat = paInt8;
break;
case DevFmtUByte:
mParams.sampleFormat = paUInt8;
break;
case DevFmtShort:
mParams.sampleFormat = paInt16;
break;
case DevFmtInt:
mParams.sampleFormat = paInt32;
break;
case DevFmtFloat:
mParams.sampleFormat = paFloat32;
break;
case DevFmtUInt:
case DevFmtUShort:
throw al::backend_exception{al::backend_error::DeviceError, "%s samples not supported",
DevFmtTypeString(mDevice->FmtType)};
}
mParams.channelCount = static_cast<int>(mDevice->channelsFromFmt());
PaError err{Pa_OpenStream(&mStream, &mParams, nullptr, mDevice->Frequency,
paFramesPerBufferUnspecified, paNoFlag, &PortCapture::readCallbackC, this)};
if(err != paNoError)
throw al::backend_exception{al::backend_error::NoDevice, "Failed to open stream: %s",
Pa_GetErrorText(err)};
mDevice->DeviceName = name;
}
/* Begin capturing; throws backend_exception if the stream fails to start. */
void PortCapture::start()
{
    const PaError err{Pa_StartStream(mStream)};
    if(err != paNoError)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start recording: %s", Pa_GetErrorText(err)};
}
/* Halt capturing; a failure is only logged since there is nothing to undo. */
void PortCapture::stop()
{
    PaError err{Pa_StopStream(mStream)};
    if(err != paNoError)
        ERR("Error stopping stream: %s\n", Pa_GetErrorText(err));
}
/* Number of captured frames waiting in the ring buffer. */
uint PortCapture::availableSamples()
{ return static_cast<uint>(mRing->readSpace()); }
/* Copy `samples` frames out of the ring buffer into `buffer`. */
void PortCapture::captureSamples(al::byte *buffer, uint samples)
{ mRing->read(buffer, samples); }
} // namespace
/* Load the PortAudio library (when built with dynamic loading) and call
 * Pa_Initialize. Returns false — after unloading the library again — if any
 * symbol is missing or initialization fails. Safe to call repeatedly: the
 * pa_handle check makes the dynload path idempotent. */
bool PortBackendFactory::init()
{
    PaError err;
#ifdef HAVE_DYNLOAD
    if(!pa_handle)
    {
#ifdef _WIN32
# define PALIB "portaudio.dll"
#elif defined(__APPLE__) && defined(__MACH__)
# define PALIB "libportaudio.2.dylib"
#elif defined(__OpenBSD__)
# define PALIB "libportaudio.so"
#else
# define PALIB "libportaudio.so.2"
#endif
        pa_handle = LoadLib(PALIB);
        if(!pa_handle)
            return false;
/* Resolve one entry point into its p-prefixed pointer, bailing out (and
 * unloading) on the first missing symbol. */
#define LOAD_FUNC(f) do {                                                     \
    p##f = reinterpret_cast<decltype(p##f)>(GetSymbol(pa_handle, #f));        \
    if(p##f == nullptr)                                                       \
    {                                                                         \
        CloseLib(pa_handle);                                                  \
        pa_handle = nullptr;                                                  \
        return false;                                                         \
    }                                                                         \
} while(0)
        LOAD_FUNC(Pa_Initialize);
        LOAD_FUNC(Pa_Terminate);
        LOAD_FUNC(Pa_GetErrorText);
        LOAD_FUNC(Pa_StartStream);
        LOAD_FUNC(Pa_StopStream);
        LOAD_FUNC(Pa_OpenStream);
        LOAD_FUNC(Pa_CloseStream);
        LOAD_FUNC(Pa_GetDefaultOutputDevice);
        LOAD_FUNC(Pa_GetDefaultInputDevice);
        LOAD_FUNC(Pa_GetStreamInfo);
#undef LOAD_FUNC
        if((err=Pa_Initialize()) != paNoError)
        {
            ERR("Pa_Initialize() returned an error: %s\n", Pa_GetErrorText(err));
            CloseLib(pa_handle);
            pa_handle = nullptr;
            return false;
        }
    }
#else
    if((err=Pa_Initialize()) != paNoError)
    {
        ERR("Pa_Initialize() returned an error: %s\n", Pa_GetErrorText(err));
        return false;
    }
#endif
    return true;
}
/* PortAudio supports both playback and capture. */
bool PortBackendFactory::querySupport(BackendType type)
{ return (type == BackendType::Playback || type == BackendType::Capture); }
/* Return the advertised device names as a null-separated list; this backend
 * only exposes the single fixed pa_device name. */
std::string PortBackendFactory::probe(BackendType type)
{
    std::string outnames;
    switch(type)
    {
    case BackendType::Playback:
    case BackendType::Capture:
        /* Includes null char. */
        outnames.append(pa_device, sizeof(pa_device));
        break;
    }
    return outnames;
}
/* Instantiate the backend matching the requested direction; null when the
 * type is unsupported. */
BackendPtr PortBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
        return BackendPtr{new PortPlayback{device}};
    case BackendType::Capture:
        return BackendPtr{new PortCapture{device}};
    default:
        break;
    }
    return nullptr;
}
/* Singleton accessor used by the backend registry. */
BackendFactory &PortBackendFactory::getFactory()
{
    static PortBackendFactory factory{};
    return factory;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_PORTAUDIO_H
#define BACKENDS_PORTAUDIO_H
#include "backends/base.h"
/* Factory exposing the PortAudio backend (playback and capture). */
struct PortBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_PORTAUDIO_H */

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_PULSEAUDIO_H
#define BACKENDS_PULSEAUDIO_H
#include "backends/base.h"
/* Factory exposing the PulseAudio backend. */
class PulseBackendFactory final : public BackendFactory {
public:
    bool init() override;
    bool querySupport(BackendType type) override;
    std::string probe(BackendType type) override;
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_PULSEAUDIO_H */

File diff suppressed because it is too large Load diff

View file

@ -1,287 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2018 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#include <SDL2/SDL.h>
#include "alMain.h"
#include "alu.h"
#include "threads.h"
#include "compat.h"
#include "backends/base.h"
#ifdef _WIN32
#define DEVNAME_PREFIX "OpenAL Soft on "
#else
#define DEVNAME_PREFIX ""
#endif
/* SDL2 playback backend state. The format fields cache what SDL actually
 * gave us at open time so reset() can restore them on the device. */
typedef struct ALCsdl2Backend {
    DERIVE_FROM_TYPE(ALCbackend);
    SDL_AudioDeviceID deviceID;
    ALsizei frameSize;
    ALuint Frequency;
    enum DevFmtChannels FmtChans;
    enum DevFmtType FmtType;
    ALuint UpdateSize;
} ALCsdl2Backend;
/* Forward declarations and vtable wiring; captureSamples/availableSamples/
 * getClockLatency are forwarded to the (unsupported) base implementations. */
static void ALCsdl2Backend_Construct(ALCsdl2Backend *self, ALCdevice *device);
static void ALCsdl2Backend_Destruct(ALCsdl2Backend *self);
static ALCenum ALCsdl2Backend_open(ALCsdl2Backend *self, const ALCchar *name);
static ALCboolean ALCsdl2Backend_reset(ALCsdl2Backend *self);
static ALCboolean ALCsdl2Backend_start(ALCsdl2Backend *self);
static void ALCsdl2Backend_stop(ALCsdl2Backend *self);
static DECLARE_FORWARD2(ALCsdl2Backend, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCsdl2Backend, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCsdl2Backend, ALCbackend, ClockLatency, getClockLatency)
static void ALCsdl2Backend_lock(ALCsdl2Backend *self);
static void ALCsdl2Backend_unlock(ALCsdl2Backend *self);
DECLARE_DEFAULT_ALLOCATORS(ALCsdl2Backend)
DEFINE_ALCBACKEND_VTABLE(ALCsdl2Backend);
static void ALCsdl2Backend_Construct(ALCsdl2Backend *self, ALCdevice *device)
{
ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
SET_VTABLE2(ALCsdl2Backend, ALCbackend, self);
self->deviceID = 0;
self->frameSize = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
self->Frequency = device->Frequency;
self->FmtChans = device->FmtChans;
self->FmtType = device->FmtType;
self->UpdateSize = device->UpdateSize;
}
static void ALCsdl2Backend_Destruct(ALCsdl2Backend *self)
{
if(self->deviceID)
SDL_CloseAudioDevice(self->deviceID);
self->deviceID = 0;
ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
static void ALCsdl2Backend_audioCallback(void *ptr, Uint8 *stream, int len)
{
ALCsdl2Backend *self = (ALCsdl2Backend*)ptr;
ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
assert((len % self->frameSize) == 0);
aluMixData(device, stream, len / self->frameSize);
}
/* Open an SDL audio device matching the ALC device's requested format as
 * closely as possible (SDL_AUDIO_ALLOW_ANY_CHANGE), then write SDL's actual
 * format back into the device and cache it for reset(). The DEVNAME_PREFIX
 * is stripped from explicit names before passing them to SDL. */
static ALCenum ALCsdl2Backend_open(ALCsdl2Backend *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    SDL_AudioSpec want, have;
    SDL_zero(want);
    SDL_zero(have);
    want.freq = device->Frequency;
    switch(device->FmtType)
    {
        case DevFmtUByte: want.format = AUDIO_U8; break;
        case DevFmtByte: want.format = AUDIO_S8; break;
        case DevFmtUShort: want.format = AUDIO_U16SYS; break;
        case DevFmtShort: want.format = AUDIO_S16SYS; break;
        case DevFmtUInt: /* fall-through */
        case DevFmtInt: want.format = AUDIO_S32SYS; break;
        case DevFmtFloat: want.format = AUDIO_F32; break;
    }
    want.channels = (device->FmtChans == DevFmtMono) ? 1 : 2;
    want.samples = device->UpdateSize;
    want.callback = ALCsdl2Backend_audioCallback;
    want.userdata = self;
    /* Passing NULL to SDL_OpenAudioDevice opens a default, which isn't
     * necessarily the first in the list.
     */
    if(!name || strcmp(name, defaultDeviceName) == 0)
        self->deviceID = SDL_OpenAudioDevice(NULL, SDL_FALSE, &want, &have,
                                             SDL_AUDIO_ALLOW_ANY_CHANGE);
    else
    {
        const size_t prefix_len = strlen(DEVNAME_PREFIX);
        if(strncmp(name, DEVNAME_PREFIX, prefix_len) == 0)
            self->deviceID = SDL_OpenAudioDevice(name+prefix_len, SDL_FALSE, &want, &have,
                                                 SDL_AUDIO_ALLOW_ANY_CHANGE);
        else
            self->deviceID = SDL_OpenAudioDevice(name, SDL_FALSE, &want, &have,
                                                 SDL_AUDIO_ALLOW_ANY_CHANGE);
    }
    if(self->deviceID == 0)
        return ALC_INVALID_VALUE;
    /* Adopt whatever format SDL actually negotiated. */
    device->Frequency = have.freq;
    if(have.channels == 1)
        device->FmtChans = DevFmtMono;
    else if(have.channels == 2)
        device->FmtChans = DevFmtStereo;
    else
    {
        ERR("Got unhandled SDL channel count: %d\n", (int)have.channels);
        return ALC_INVALID_VALUE;
    }
    switch(have.format)
    {
        case AUDIO_U8: device->FmtType = DevFmtUByte; break;
        case AUDIO_S8: device->FmtType = DevFmtByte; break;
        case AUDIO_U16SYS: device->FmtType = DevFmtUShort; break;
        case AUDIO_S16SYS: device->FmtType = DevFmtShort; break;
        case AUDIO_S32SYS: device->FmtType = DevFmtInt; break;
        case AUDIO_F32SYS: device->FmtType = DevFmtFloat; break;
        default:
            ERR("Got unsupported SDL format: 0x%04x\n", have.format);
            return ALC_INVALID_VALUE;
    }
    device->UpdateSize = have.samples;
    device->NumUpdates = 2; /* SDL always (tries to) use two periods. */
    /* Cache the negotiated format so reset() can restore it later. */
    self->frameSize = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
    self->Frequency = device->Frequency;
    self->FmtChans = device->FmtChans;
    self->FmtType = device->FmtType;
    self->UpdateSize = device->UpdateSize;
    alstr_copy_cstr(&device->DeviceName, name ? name : defaultDeviceName);
    return ALC_NO_ERROR;
}
/* Restore the format SDL negotiated at open time onto the device. */
static ALCboolean ALCsdl2Backend_reset(ALCsdl2Backend *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    device->Frequency = self->Frequency;
    device->FmtChans = self->FmtChans;
    device->FmtType = self->FmtType;
    device->UpdateSize = self->UpdateSize;
    device->NumUpdates = 2;
    SetDefaultWFXChannelOrder(device);
    return ALC_TRUE;
}
/* Unpause the SDL device; SDL then drives the audio callback. */
static ALCboolean ALCsdl2Backend_start(ALCsdl2Backend *self)
{
    SDL_PauseAudioDevice(self->deviceID, 0);
    return ALC_TRUE;
}
/* Pause the SDL device, stopping callback invocations. */
static void ALCsdl2Backend_stop(ALCsdl2Backend *self)
{
    SDL_PauseAudioDevice(self->deviceID, 1);
}
/* Lock/unlock SDL's audio-callback mutex to serialize with the callback. */
static void ALCsdl2Backend_lock(ALCsdl2Backend *self)
{
    SDL_LockAudioDevice(self->deviceID);
}
static void ALCsdl2Backend_unlock(ALCsdl2Backend *self)
{
    SDL_UnlockAudioDevice(self->deviceID);
}
/* Factory object exposing the SDL2 backend to the backend registry. */
typedef struct ALCsdl2BackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCsdl2BackendFactory;
#define ALCsdl2BACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCsdl2BackendFactory, ALCbackendFactory) } }
ALCbackendFactory *ALCsdl2BackendFactory_getFactory(void);
static ALCboolean ALCsdl2BackendFactory_init(ALCsdl2BackendFactory *self);
static void ALCsdl2BackendFactory_deinit(ALCsdl2BackendFactory *self);
static ALCboolean ALCsdl2BackendFactory_querySupport(ALCsdl2BackendFactory *self, ALCbackend_Type type);
static void ALCsdl2BackendFactory_probe(ALCsdl2BackendFactory *self, enum DevProbe type);
static ALCbackend* ALCsdl2BackendFactory_createBackend(ALCsdl2BackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCsdl2BackendFactory);
/* Singleton accessor used by the backend registry. */
ALCbackendFactory *ALCsdl2BackendFactory_getFactory(void)
{
    static ALCsdl2BackendFactory factory = ALCsdl2BACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &factory);
}
/* Initialize SDL's audio subsystem; the factory is usable only on success.
 * CONSISTENCY FIX: the success path returned AL_TRUE, mixing the AL constant
 * family into an ALCboolean function; every sibling here uses ALC_TRUE /
 * ALC_FALSE. Both constants are 1, so behavior is unchanged. */
static ALCboolean ALCsdl2BackendFactory_init(ALCsdl2BackendFactory* UNUSED(self))
{
    if(SDL_InitSubSystem(SDL_INIT_AUDIO) == 0)
        return ALC_TRUE;
    return ALC_FALSE;
}
/* Shut down SDL's audio subsystem. */
static void ALCsdl2BackendFactory_deinit(ALCsdl2BackendFactory* UNUSED(self))
{
    SDL_QuitSubSystem(SDL_INIT_AUDIO);
}
/* SDL2 here supports playback only (no capture implementation). */
static ALCboolean ALCsdl2BackendFactory_querySupport(ALCsdl2BackendFactory* UNUSED(self), ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
        return ALC_TRUE;
    return ALC_FALSE;
}
/* Enumerate SDL output devices (playback probes only): advertise the default
 * device name, then each SDL device name with the DEVNAME_PREFIX prepended. */
static void ALCsdl2BackendFactory_probe(ALCsdl2BackendFactory* UNUSED(self), enum DevProbe type)
{
    int num_devices, i;
    al_string name;
    if(type != ALL_DEVICE_PROBE)
        return;
    AL_STRING_INIT(name);
    num_devices = SDL_GetNumAudioDevices(SDL_FALSE);
    AppendAllDevicesList(defaultDeviceName);
    for(i = 0;i < num_devices;++i)
    {
        alstr_copy_cstr(&name, DEVNAME_PREFIX);
        alstr_append_cstr(&name, SDL_GetAudioDeviceName(i, SDL_FALSE));
        AppendAllDevicesList(alstr_get_cstr(name));
    }
    alstr_reset(&name);
}
/* Construct the playback backend; NULL for unsupported types or OOM. */
static ALCbackend* ALCsdl2BackendFactory_createBackend(ALCsdl2BackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
    {
        ALCsdl2Backend *backend;
        NEW_OBJ(backend, ALCsdl2Backend)(device);
        if(!backend) return NULL;
        return STATIC_CAST(ALCbackend, backend);
    }
    return NULL;
}

View file

@ -0,0 +1,216 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2018 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/sdl2.h"
#include <cassert>
#include <cstdlib>
#include <cstring>
#include <string>
#include "alcmain.h"
#include "almalloc.h"
#include "alu.h"
#include "core/logging.h"
#include <SDL2/SDL.h>
namespace {
/* Prefix device names on Windows to distinguish them in mixed device lists. */
#ifdef _WIN32
#define DEVNAME_PREFIX "OpenAL Soft on "
#else
#define DEVNAME_PREFIX ""
#endif
/* Name advertised for SDL's default output device. */
constexpr char defaultDeviceName[] = DEVNAME_PREFIX "Default Device";
/* SDL2 playback backend; audioCallbackC is the C trampoline SDL calls. The
 * cached format members record what SDL negotiated, for reset(). */
struct Sdl2Backend final : public BackendBase {
    Sdl2Backend(ALCdevice *device) noexcept : BackendBase{device} { }
    ~Sdl2Backend() override;
    void audioCallback(Uint8 *stream, int len) noexcept;
    static void audioCallbackC(void *ptr, Uint8 *stream, int len) noexcept
    { static_cast<Sdl2Backend*>(ptr)->audioCallback(stream, len); }
    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
    SDL_AudioDeviceID mDeviceID{0u};
    uint mFrameSize{0};
    uint mFrequency{0u};
    DevFmtChannels mFmtChans{};
    DevFmtType mFmtType{};
    uint mUpdateSize{0u};
    DEF_NEWDEL(Sdl2Backend)
};
/* Close the SDL audio device if one was opened. */
Sdl2Backend::~Sdl2Backend()
{
    if(mDeviceID)
        SDL_CloseAudioDevice(mDeviceID);
    mDeviceID = 0;
}
/* SDL audio callback (SDL's thread): render the next `len` bytes of output.
 * `len` is asserted to be a whole number of frames. */
void Sdl2Backend::audioCallback(Uint8 *stream, int len) noexcept
{
    const auto ulen = static_cast<unsigned int>(len);
    assert((ulen % mFrameSize) == 0);
    mDevice->renderSamples(stream, ulen / mFrameSize, mDevice->channelsFromFmt());
}
/* Open an SDL audio device matching the ALC device's requested format as
 * closely as possible (SDL_AUDIO_ALLOW_ANY_CHANGE), write SDL's actual
 * format back into the device, and cache it for reset(). An explicit name
 * has the DEVNAME_PREFIX stripped before being handed to SDL. Throws
 * backend_exception on open failure or an unrepresentable format. */
void Sdl2Backend::open(const char *name)
{
    SDL_AudioSpec want{}, have{};
    want.freq = static_cast<int>(mDevice->Frequency);
    switch(mDevice->FmtType)
    {
    case DevFmtUByte: want.format = AUDIO_U8; break;
    case DevFmtByte: want.format = AUDIO_S8; break;
    case DevFmtUShort: want.format = AUDIO_U16SYS; break;
    case DevFmtShort: want.format = AUDIO_S16SYS; break;
    case DevFmtUInt: /* fall-through */
    case DevFmtInt: want.format = AUDIO_S32SYS; break;
    case DevFmtFloat: want.format = AUDIO_F32; break;
    }
    want.channels = (mDevice->FmtChans == DevFmtMono) ? 1 : 2;
    want.samples = static_cast<Uint16>(mDevice->UpdateSize);
    want.callback = &Sdl2Backend::audioCallbackC;
    want.userdata = this;
    /* Passing nullptr to SDL_OpenAudioDevice opens a default, which isn't
     * necessarily the first in the list.
     */
    if(!name || strcmp(name, defaultDeviceName) == 0)
        mDeviceID = SDL_OpenAudioDevice(nullptr, SDL_FALSE, &want, &have,
            SDL_AUDIO_ALLOW_ANY_CHANGE);
    else
    {
        const size_t prefix_len = strlen(DEVNAME_PREFIX);
        if(strncmp(name, DEVNAME_PREFIX, prefix_len) == 0)
            mDeviceID = SDL_OpenAudioDevice(name+prefix_len, SDL_FALSE, &want, &have,
                SDL_AUDIO_ALLOW_ANY_CHANGE);
        else
            mDeviceID = SDL_OpenAudioDevice(name, SDL_FALSE, &want, &have,
                SDL_AUDIO_ALLOW_ANY_CHANGE);
    }
    if(mDeviceID == 0)
        throw al::backend_exception{al::backend_error::NoDevice, "%s", SDL_GetError()};
    /* Adopt whatever format SDL actually negotiated. */
    mDevice->Frequency = static_cast<uint>(have.freq);
    if(have.channels == 1)
        mDevice->FmtChans = DevFmtMono;
    else if(have.channels == 2)
        mDevice->FmtChans = DevFmtStereo;
    else
        throw al::backend_exception{al::backend_error::DeviceError,
            "Unhandled SDL channel count: %d", int{have.channels}};
    switch(have.format)
    {
    case AUDIO_U8: mDevice->FmtType = DevFmtUByte; break;
    case AUDIO_S8: mDevice->FmtType = DevFmtByte; break;
    case AUDIO_U16SYS: mDevice->FmtType = DevFmtUShort; break;
    case AUDIO_S16SYS: mDevice->FmtType = DevFmtShort; break;
    case AUDIO_S32SYS: mDevice->FmtType = DevFmtInt; break;
    case AUDIO_F32SYS: mDevice->FmtType = DevFmtFloat; break;
    default:
        throw al::backend_exception{al::backend_error::DeviceError, "Unhandled SDL format: 0x%04x",
            have.format};
    }
    mDevice->UpdateSize = have.samples;
    mDevice->BufferSize = have.samples * 2; /* SDL always (tries to) use two periods. */
    /* Cache the negotiated format so reset() can restore it later. */
    mFrameSize = mDevice->frameSizeFromFmt();
    mFrequency = mDevice->Frequency;
    mFmtChans = mDevice->FmtChans;
    mFmtType = mDevice->FmtType;
    mUpdateSize = mDevice->UpdateSize;
    mDevice->DeviceName = name ? name : defaultDeviceName;
}
/* Restore the format SDL negotiated at open time onto the device. */
bool Sdl2Backend::reset()
{
    mDevice->Frequency = mFrequency;
    mDevice->FmtChans = mFmtChans;
    mDevice->FmtType = mFmtType;
    mDevice->UpdateSize = mUpdateSize;
    mDevice->BufferSize = mUpdateSize * 2;
    setDefaultWFXChannelOrder();
    return true;
}
/* Unpause/pause the SDL device, which starts/stops callback invocations. */
void Sdl2Backend::start()
{ SDL_PauseAudioDevice(mDeviceID, 0); }
void Sdl2Backend::stop()
{ SDL_PauseAudioDevice(mDeviceID, 1); }
} // namespace
/* Singleton accessor used by the backend registry. */
BackendFactory &SDL2BackendFactory::getFactory()
{
    static SDL2BackendFactory factory{};
    return factory;
}
/* Initialize SDL's audio subsystem; the factory is usable only on success. */
bool SDL2BackendFactory::init()
{ return (SDL_InitSubSystem(SDL_INIT_AUDIO) == 0); }
/* SDL2 here supports playback only (no capture implementation). */
bool SDL2BackendFactory::querySupport(BackendType type)
{ return type == BackendType::Playback; }
/* Enumerate SDL output devices as a null-separated list: the default device
 * name first, then each SDL device name with DEVNAME_PREFIX prepended. */
std::string SDL2BackendFactory::probe(BackendType type)
{
    std::string outnames;
    if(type != BackendType::Playback)
        return outnames;
    int num_devices{SDL_GetNumAudioDevices(SDL_FALSE)};
    /* Includes null char. */
    outnames.append(defaultDeviceName, sizeof(defaultDeviceName));
    for(int i{0};i < num_devices;++i)
    {
        std::string name{DEVNAME_PREFIX};
        name += SDL_GetAudioDeviceName(i, SDL_FALSE);
        if(!name.empty())
            outnames.append(name.c_str(), name.length()+1);
    }
    return outnames;
}
/* Construct the playback backend; null for unsupported types. */
BackendPtr SDL2BackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type == BackendType::Playback)
        return BackendPtr{new Sdl2Backend{device}};
    return nullptr;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_SDL2_H
#define BACKENDS_SDL2_H
#include "backends/base.h"
/* Factory for the SDL2 playback backend. */
struct SDL2BackendFactory final : public BackendFactory {
public:
    bool init() override;

    bool querySupport(BackendType type) override;

    std::string probe(BackendType type) override;

    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Singleton accessor. */
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_SDL2_H */

View file

@ -1,342 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "alMain.h"
#include "alu.h"
#include "threads.h"
#include "backends/base.h"
#include <sndio.h>
typedef struct ALCsndioBackend {
DERIVE_FROM_TYPE(ALCbackend);
struct sio_hdl *sndHandle;
ALvoid *mix_data;
ALsizei data_size;
ATOMIC(int) killNow;
althrd_t thread;
} ALCsndioBackend;
static int ALCsndioBackend_mixerProc(void *ptr);
static void ALCsndioBackend_Construct(ALCsndioBackend *self, ALCdevice *device);
static void ALCsndioBackend_Destruct(ALCsndioBackend *self);
static ALCenum ALCsndioBackend_open(ALCsndioBackend *self, const ALCchar *name);
static ALCboolean ALCsndioBackend_reset(ALCsndioBackend *self);
static ALCboolean ALCsndioBackend_start(ALCsndioBackend *self);
static void ALCsndioBackend_stop(ALCsndioBackend *self);
static DECLARE_FORWARD2(ALCsndioBackend, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCsndioBackend, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCsndioBackend, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCsndioBackend, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCsndioBackend, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCsndioBackend)
DEFINE_ALCBACKEND_VTABLE(ALCsndioBackend);
static const ALCchar sndio_device[] = "SndIO Default";
static void ALCsndioBackend_Construct(ALCsndioBackend *self, ALCdevice *device)
{
ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
SET_VTABLE2(ALCsndioBackend, ALCbackend, self);
self->sndHandle = NULL;
self->mix_data = NULL;
ATOMIC_INIT(&self->killNow, AL_TRUE);
}
static void ALCsndioBackend_Destruct(ALCsndioBackend *self)
{
if(self->sndHandle)
sio_close(self->sndHandle);
self->sndHandle = NULL;
al_free(self->mix_data);
self->mix_data = NULL;
ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
static int ALCsndioBackend_mixerProc(void *ptr)
{
ALCsndioBackend *self = (ALCsndioBackend*)ptr;
ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
ALsizei frameSize;
size_t wrote;
SetRTPriority();
althrd_setname(althrd_current(), MIXER_THREAD_NAME);
frameSize = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
while(!ATOMIC_LOAD(&self->killNow, almemory_order_acquire) &&
ATOMIC_LOAD(&device->Connected, almemory_order_acquire))
{
ALsizei len = self->data_size;
ALubyte *WritePtr = self->mix_data;
ALCsndioBackend_lock(self);
aluMixData(device, WritePtr, len/frameSize);
ALCsndioBackend_unlock(self);
while(len > 0 && !ATOMIC_LOAD(&self->killNow, almemory_order_acquire))
{
wrote = sio_write(self->sndHandle, WritePtr, len);
if(wrote == 0)
{
ERR("sio_write failed\n");
ALCdevice_Lock(device);
aluHandleDisconnect(device, "Failed to write playback samples");
ALCdevice_Unlock(device);
break;
}
len -= wrote;
WritePtr += wrote;
}
}
return 0;
}
static ALCenum ALCsndioBackend_open(ALCsndioBackend *self, const ALCchar *name)
{
ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
if(!name)
name = sndio_device;
else if(strcmp(name, sndio_device) != 0)
return ALC_INVALID_VALUE;
self->sndHandle = sio_open(NULL, SIO_PLAY, 0);
if(self->sndHandle == NULL)
{
ERR("Could not open device\n");
return ALC_INVALID_VALUE;
}
alstr_copy_cstr(&device->DeviceName, name);
return ALC_NO_ERROR;
}
static ALCboolean ALCsndioBackend_reset(ALCsndioBackend *self)
{
ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
struct sio_par par;
sio_initpar(&par);
par.rate = device->Frequency;
par.pchan = ((device->FmtChans != DevFmtMono) ? 2 : 1);
switch(device->FmtType)
{
case DevFmtByte:
par.bits = 8;
par.sig = 1;
break;
case DevFmtUByte:
par.bits = 8;
par.sig = 0;
break;
case DevFmtFloat:
case DevFmtShort:
par.bits = 16;
par.sig = 1;
break;
case DevFmtUShort:
par.bits = 16;
par.sig = 0;
break;
case DevFmtInt:
par.bits = 32;
par.sig = 1;
break;
case DevFmtUInt:
par.bits = 32;
par.sig = 0;
break;
}
par.le = SIO_LE_NATIVE;
par.round = device->UpdateSize;
par.appbufsz = device->UpdateSize * (device->NumUpdates-1);
if(!par.appbufsz) par.appbufsz = device->UpdateSize;
if(!sio_setpar(self->sndHandle, &par) || !sio_getpar(self->sndHandle, &par))
{
ERR("Failed to set device parameters\n");
return ALC_FALSE;
}
if(par.bits != par.bps*8)
{
ERR("Padded samples not supported (%u of %u bits)\n", par.bits, par.bps*8);
return ALC_FALSE;
}
device->Frequency = par.rate;
device->FmtChans = ((par.pchan==1) ? DevFmtMono : DevFmtStereo);
if(par.bits == 8 && par.sig == 1)
device->FmtType = DevFmtByte;
else if(par.bits == 8 && par.sig == 0)
device->FmtType = DevFmtUByte;
else if(par.bits == 16 && par.sig == 1)
device->FmtType = DevFmtShort;
else if(par.bits == 16 && par.sig == 0)
device->FmtType = DevFmtUShort;
else if(par.bits == 32 && par.sig == 1)
device->FmtType = DevFmtInt;
else if(par.bits == 32 && par.sig == 0)
device->FmtType = DevFmtUInt;
else
{
ERR("Unhandled sample format: %s %u-bit\n", (par.sig?"signed":"unsigned"), par.bits);
return ALC_FALSE;
}
device->UpdateSize = par.round;
device->NumUpdates = (par.bufsz/par.round) + 1;
SetDefaultChannelOrder(device);
return ALC_TRUE;
}
static ALCboolean ALCsndioBackend_start(ALCsndioBackend *self)
{
ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
self->data_size = device->UpdateSize * FrameSizeFromDevFmt(
device->FmtChans, device->FmtType, device->AmbiOrder
);
al_free(self->mix_data);
self->mix_data = al_calloc(16, self->data_size);
if(!sio_start(self->sndHandle))
{
ERR("Error starting playback\n");
return ALC_FALSE;
}
ATOMIC_STORE(&self->killNow, AL_FALSE, almemory_order_release);
if(althrd_create(&self->thread, ALCsndioBackend_mixerProc, self) != althrd_success)
{
sio_stop(self->sndHandle);
return ALC_FALSE;
}
return ALC_TRUE;
}
static void ALCsndioBackend_stop(ALCsndioBackend *self)
{
int res;
if(ATOMIC_EXCHANGE(&self->killNow, AL_TRUE, almemory_order_acq_rel))
return;
althrd_join(self->thread, &res);
if(!sio_stop(self->sndHandle))
ERR("Error stopping device\n");
al_free(self->mix_data);
self->mix_data = NULL;
}
typedef struct ALCsndioBackendFactory {
DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCsndioBackendFactory;
#define ALCSNDIOBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCsndioBackendFactory, ALCbackendFactory) } }
ALCbackendFactory *ALCsndioBackendFactory_getFactory(void);
static ALCboolean ALCsndioBackendFactory_init(ALCsndioBackendFactory *self);
static DECLARE_FORWARD(ALCsndioBackendFactory, ALCbackendFactory, void, deinit)
static ALCboolean ALCsndioBackendFactory_querySupport(ALCsndioBackendFactory *self, ALCbackend_Type type);
static void ALCsndioBackendFactory_probe(ALCsndioBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCsndioBackendFactory_createBackend(ALCsndioBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCsndioBackendFactory);
ALCbackendFactory *ALCsndioBackendFactory_getFactory(void)
{
static ALCsndioBackendFactory factory = ALCSNDIOBACKENDFACTORY_INITIALIZER;
return STATIC_CAST(ALCbackendFactory, &factory);
}
static ALCboolean ALCsndioBackendFactory_init(ALCsndioBackendFactory* UNUSED(self))
{
/* No dynamic loading */
return ALC_TRUE;
}
static ALCboolean ALCsndioBackendFactory_querySupport(ALCsndioBackendFactory* UNUSED(self), ALCbackend_Type type)
{
if(type == ALCbackend_Playback)
return ALC_TRUE;
return ALC_FALSE;
}
static void ALCsndioBackendFactory_probe(ALCsndioBackendFactory* UNUSED(self), enum DevProbe type)
{
switch(type)
{
case ALL_DEVICE_PROBE:
AppendAllDevicesList(sndio_device);
break;
case CAPTURE_DEVICE_PROBE:
break;
}
}
static ALCbackend* ALCsndioBackendFactory_createBackend(ALCsndioBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
if(type == ALCbackend_Playback)
{
ALCsndioBackend *backend;
NEW_OBJ(backend, ALCsndioBackend)(device);
if(!backend) return NULL;
return STATIC_CAST(ALCbackend, backend);
}
return NULL;
}

View file

@ -0,0 +1,517 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/sndio.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <thread>
#include <functional>
#include "alcmain.h"
#include "alu.h"
#include "core/logging.h"
#include "ringbuffer.h"
#include "threads.h"
#include "vector.h"
#include <sndio.h>
namespace {
static const char sndio_device[] = "SndIO Default";
/* Playback backend driving an sndio device from a dedicated mixer thread. */
struct SndioPlayback final : public BackendBase {
    SndioPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~SndioPlayback() override;

    /* Mixer thread entry point. */
    int mixerProc();

    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;

    /* sndio device handle; null until open() succeeds. */
    sio_hdl *mSndHandle{nullptr};

    /* Staging buffer the mixer renders into before writing to the device. */
    al::vector<al::byte> mBuffer;

    /* Set to true to ask the mixer thread to exit. */
    std::atomic<bool> mKillNow{true};
    std::thread mThread;

    DEF_NEWDEL(SndioPlayback)
};
/* Closes the sndio handle if it was opened. */
SndioPlayback::~SndioPlayback()
{
    if(mSndHandle)
        sio_close(mSndHandle);
    mSndHandle = nullptr;
}
/* Mixer thread: repeatedly renders audio into mBuffer and writes it to the
 * sndio device until asked to stop or the device disconnects. Returns 0 on
 * normal exit, 1 if the device parameters couldn't be queried at startup.
 */
int SndioPlayback::mixerProc()
{
    sio_par par;
    sio_initpar(&par);
    if(!sio_getpar(mSndHandle, &par))
    {
        mDevice->handleDisconnect("Failed to get device parameters");
        return 1;
    }

    /* Frame layout from the negotiated parameters: pchan samples per frame,
     * bps bytes per sample.
     */
    const size_t frameStep{par.pchan};
    const size_t frameSize{frameStep * par.bps};

    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);

    while(!mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        al::byte *WritePtr{mBuffer.data()};
        size_t len{mBuffer.size()};

        mDevice->renderSamples(WritePtr, static_cast<uint>(len/frameSize), frameStep);
        /* sio_write may accept fewer bytes than requested; loop until the
         * whole buffer is consumed or we're told to stop.
         */
        while(len > 0 && !mKillNow.load(std::memory_order_acquire))
        {
            size_t wrote{sio_write(mSndHandle, WritePtr, len)};
            if(wrote == 0)
            {
                /* A zero-byte write indicates a device failure. */
                ERR("sio_write failed\n");
                mDevice->handleDisconnect("Failed to write playback samples");
                break;
            }

            len -= wrote;
            WritePtr += wrote;
        }
    }

    return 0;
}
/* Opens the default sndio playback device. Only the single built-in device
 * name is accepted; a null name selects it implicitly.
 *
 * Throws al::backend_exception on an unknown name or if the device can't
 * be opened.
 */
void SndioPlayback::open(const char *name)
{
    if(!name)
        name = sndio_device;
    else if(strcmp(name, sndio_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};

    mSndHandle = sio_open(nullptr, SIO_PLAY, 0);
    if(mSndHandle == nullptr)
        throw al::backend_exception{al::backend_error::NoDevice, "Could not open backend device"};

    mDevice->DeviceName = name;
}
/* Negotiates playback parameters with the sndio device: requests the
 * device's current format/rate/channels, then adjusts the device fields to
 * whatever the hardware actually accepted. Returns false if the resulting
 * parameters can't be represented (padded samples, non-native endian, or
 * an unhandled sample format).
 */
bool SndioPlayback::reset()
{
    sio_par par;
    sio_initpar(&par);

    par.rate = mDevice->Frequency;
    switch(mDevice->FmtChans)
    {
    case DevFmtMono   : par.pchan = 1; break;
    case DevFmtQuad   : par.pchan = 4; break;
    case DevFmtX51Rear: // fall-through - "Similar to 5.1, except using rear channels instead of sides"
    case DevFmtX51    : par.pchan = 6; break;
    case DevFmtX61    : par.pchan = 7; break;
    case DevFmtX71    : par.pchan = 8; break;

    // fall back to stereo for Ambi3D
    case DevFmtAmbi3D : // fall-through
    case DevFmtStereo : par.pchan = 2; break;
    }

    switch(mDevice->FmtType)
    {
    case DevFmtByte:
        par.bits = 8;
        par.sig = 1;
        break;
    case DevFmtUByte:
        par.bits = 8;
        par.sig = 0;
        break;
    /* Float isn't supported by sndio; request signed 16-bit instead. */
    case DevFmtFloat:
    case DevFmtShort:
        par.bits = 16;
        par.sig = 1;
        break;
    case DevFmtUShort:
        par.bits = 16;
        par.sig = 0;
        break;
    case DevFmtInt:
        par.bits = 32;
        par.sig = 1;
        break;
    case DevFmtUInt:
        par.bits = 32;
        par.sig = 0;
        break;
    }
    par.le = SIO_LE_NATIVE;

    par.round = mDevice->UpdateSize;
    par.appbufsz = mDevice->BufferSize - mDevice->UpdateSize;
    if(!par.appbufsz) par.appbufsz = mDevice->UpdateSize;

    if(!sio_setpar(mSndHandle, &par) || !sio_getpar(mSndHandle, &par))
    {
        ERR("Failed to set device parameters\n");
        return false;
    }

    if(par.bits != par.bps*8)
    {
        ERR("Padded samples not supported (%u of %u bits)\n", par.bits, par.bps*8);
        return false;
    }
    if(par.le != SIO_LE_NATIVE)
    {
        ERR("Non-native-endian samples not supported (got %s-endian)\n",
            par.le ? "little" : "big");
        return false;
    }

    /* Adopt whatever the device actually granted, warning when it differs
     * from the requested channel configuration.
     */
    mDevice->Frequency = par.rate;

    if(par.pchan < 2)
    {
        if(mDevice->FmtChans != DevFmtMono)
        {
            WARN("Got %u channel for %s\n", par.pchan, DevFmtChannelsString(mDevice->FmtChans));
            mDevice->FmtChans = DevFmtMono;
        }
    }
    else if((par.pchan == 2 && mDevice->FmtChans != DevFmtStereo)
        || par.pchan == 3
        || (par.pchan == 4 && mDevice->FmtChans != DevFmtQuad)
        || par.pchan == 5
        || (par.pchan == 6 && mDevice->FmtChans != DevFmtX51 && mDevice->FmtChans != DevFmtX51Rear)
        || (par.pchan == 7 && mDevice->FmtChans != DevFmtX61)
        || (par.pchan == 8 && mDevice->FmtChans != DevFmtX71)
        || par.pchan > 8)
    {
        WARN("Got %u channels for %s\n", par.pchan, DevFmtChannelsString(mDevice->FmtChans));
        mDevice->FmtChans = DevFmtStereo;
    }

    if(par.bits == 8 && par.sig == 1)
        mDevice->FmtType = DevFmtByte;
    else if(par.bits == 8 && par.sig == 0)
        mDevice->FmtType = DevFmtUByte;
    else if(par.bits == 16 && par.sig == 1)
        mDevice->FmtType = DevFmtShort;
    else if(par.bits == 16 && par.sig == 0)
        mDevice->FmtType = DevFmtUShort;
    else if(par.bits == 32 && par.sig == 1)
        mDevice->FmtType = DevFmtInt;
    else if(par.bits == 32 && par.sig == 0)
        mDevice->FmtType = DevFmtUInt;
    else
    {
        ERR("Unhandled sample format: %s %u-bit\n", (par.sig?"signed":"unsigned"), par.bits);
        return false;
    }

    setDefaultChannelOrder();

    mDevice->UpdateSize = par.round;
    mDevice->BufferSize = par.bufsz + par.round;

    /* Pre-fill the staging buffer with silence: zero for signed formats,
     * the unsigned mid-point value otherwise.
     */
    mBuffer.resize(mDevice->UpdateSize * par.pchan*par.bps);
    if(par.sig == 1)
        std::fill(mBuffer.begin(), mBuffer.end(), al::byte{});
    else if(par.bits == 8)
        std::fill_n(mBuffer.data(), mBuffer.size(), al::byte(0x80));
    else if(par.bits == 16)
        std::fill_n(reinterpret_cast<uint16_t*>(mBuffer.data()), mBuffer.size()/2, 0x8000);
    else if(par.bits == 32)
        std::fill_n(reinterpret_cast<uint32_t*>(mBuffer.data()), mBuffer.size()/4, 0x80000000u);

    return true;
}
/* Starts the sndio device and launches the mixer thread.
 *
 * Throws al::backend_exception if the device fails to start or the thread
 * can't be created (the device is stopped again in the latter case).
 */
void SndioPlayback::start()
{
    if(!sio_start(mSndHandle))
        throw al::backend_exception{al::backend_error::DeviceError, "Error starting playback"};

    try {
        mKillNow.store(false, std::memory_order_release);
        mThread = std::thread{std::mem_fn(&SndioPlayback::mixerProc), this};
    }
    catch(std::exception& e) {
        sio_stop(mSndHandle);
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start mixing thread: %s", e.what()};
    }
}
/* Signals the mixer thread to exit, joins it, then stops the device.
 * No-op if playback was already stopped or the thread isn't running.
 */
void SndioPlayback::stop()
{
    if(mKillNow.exchange(true, std::memory_order_acq_rel) || !mThread.joinable())
        return;
    mThread.join();

    if(!sio_stop(mSndHandle))
        ERR("Error stopping device\n");
}
/* Capture backend reading from an sndio device on a dedicated record
 * thread, buffering samples through a ring buffer.
 */
struct SndioCapture final : public BackendBase {
    SndioCapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~SndioCapture() override;

    /* Record thread entry point. */
    int recordProc();

    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;

    /* sndio device handle; null until open() succeeds. */
    sio_hdl *mSndHandle{nullptr};

    /* Ring buffer between the record thread and captureSamples(). */
    RingBufferPtr mRing;

    /* Set to true to ask the record thread to exit. */
    std::atomic<bool> mKillNow{true};
    std::thread mThread;

    DEF_NEWDEL(SndioCapture)
};
/* Closes the sndio handle if it was opened. */
SndioCapture::~SndioCapture()
{
    if(mSndHandle)
        sio_close(mSndHandle);
    mSndHandle = nullptr;
}
/* Record thread: reads samples from the sndio device into the ring buffer
 * until asked to stop or the device disconnects. When the ring is full,
 * excess samples are read into a scratch buffer and discarded so the device
 * doesn't stall. Always returns 0.
 */
int SndioCapture::recordProc()
{
    SetRTPriority();
    althrd_setname(RECORD_THREAD_NAME);

    const uint frameSize{mDevice->frameSizeFromFmt()};

    while(!mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        auto data = mRing->getWriteVector();
        size_t todo{data.first.len + data.second.len};
        if(todo == 0)
        {
            /* Ring buffer is full; drain the device into a throwaway
             * buffer to keep it from overrunning.
             */
            static char junk[4096];
            sio_read(mSndHandle, junk,
                minz(sizeof(junk)/frameSize, mDevice->UpdateSize)*frameSize);
            continue;
        }

        size_t total{0u};
        /* Convert the write-vector lengths from frames to bytes, and cap
         * each pass at one update's worth of data.
         */
        data.first.len  *= frameSize;
        data.second.len *= frameSize;
        todo = minz(todo, mDevice->UpdateSize) * frameSize;
        while(total < todo)
        {
            if(!data.first.len)
                data.first = data.second;

            size_t got{sio_read(mSndHandle, data.first.buf, minz(todo-total, data.first.len))};
            if(!got)
            {
                /* A zero-byte read indicates a device failure. */
                mDevice->handleDisconnect("Failed to read capture samples");
                break;
            }

            data.first.buf += got;
            data.first.len -= got;
            total += got;
        }
        mRing->writeAdvance(total / frameSize);
    }

    return 0;
}
/* Opens the default sndio capture device and configures it to exactly match
 * the requested sample type, channel count, and sample rate (capture does
 * not adapt to what the device grants, unlike playback reset()).
 *
 * Throws al::backend_exception if the name is unrecognized, the device
 * can't be opened, float capture is requested (unsupported by sndio), the
 * device reports padded samples, or the device won't accept the exact
 * requested parameters.
 */
void SndioCapture::open(const char *name)
{
    if(!name)
        name = sndio_device;
    else if(strcmp(name, sndio_device) != 0)
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};

    mSndHandle = sio_open(nullptr, SIO_REC, 0);
    if(mSndHandle == nullptr)
        throw al::backend_exception{al::backend_error::NoDevice, "Could not open backend device"};

    sio_par par;
    sio_initpar(&par);

    switch(mDevice->FmtType)
    {
    case DevFmtByte:
        par.bps = 1;
        par.sig = 1;
        break;
    case DevFmtUByte:
        par.bps = 1;
        par.sig = 0;
        break;
    case DevFmtShort:
        par.bps = 2;
        par.sig = 1;
        break;
    case DevFmtUShort:
        par.bps = 2;
        par.sig = 0;
        break;
    case DevFmtInt:
        par.bps = 4;
        par.sig = 1;
        break;
    case DevFmtUInt:
        par.bps = 4;
        par.sig = 0;
        break;
    case DevFmtFloat:
        /* sndio has no float sample support for capture. */
        throw al::backend_exception{al::backend_error::DeviceError,
            "%s capture samples not supported", DevFmtTypeString(mDevice->FmtType)};
    }
    par.bits = par.bps * 8;
    par.le = SIO_LE_NATIVE;
    par.msb = SIO_LE_NATIVE ? 0 : 1;
    par.rchan = mDevice->channelsFromFmt();
    par.rate = mDevice->Frequency;

    /* Buffer at least 100ms of audio, reading in chunks of at most 25ms. */
    par.appbufsz = maxu(mDevice->BufferSize, mDevice->Frequency/10);
    par.round = minu(par.appbufsz, mDevice->Frequency/40);

    mDevice->UpdateSize = par.round;
    mDevice->BufferSize = par.appbufsz;

    if(!sio_setpar(mSndHandle, &par) || !sio_getpar(mSndHandle, &par))
        throw al::backend_exception{al::backend_error::DeviceError,
            /* Fixed typo in the error message ("praameters"). */
            "Failed to set device parameters"};

    if(par.bits != par.bps*8)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Padded samples not supported (got %u of %u bits)", par.bits, par.bps*8};

    /* Require the device to have accepted the exact requested format. */
    if(!((mDevice->FmtType == DevFmtByte && par.bits == 8 && par.sig != 0)
        || (mDevice->FmtType == DevFmtUByte && par.bits == 8 && par.sig == 0)
        || (mDevice->FmtType == DevFmtShort && par.bits == 16 && par.sig != 0)
        || (mDevice->FmtType == DevFmtUShort && par.bits == 16 && par.sig == 0)
        || (mDevice->FmtType == DevFmtInt && par.bits == 32 && par.sig != 0)
        || (mDevice->FmtType == DevFmtUInt && par.bits == 32 && par.sig == 0))
        || mDevice->channelsFromFmt() != par.rchan || mDevice->Frequency != par.rate)
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to set format %s %s %uhz, got %c%u %u-channel %uhz instead",
            DevFmtTypeString(mDevice->FmtType), DevFmtChannelsString(mDevice->FmtChans),
            mDevice->Frequency, par.sig?'s':'u', par.bits, par.rchan, par.rate};

    mRing = RingBuffer::Create(mDevice->BufferSize, par.bps*par.rchan, false);

    setDefaultChannelOrder();

    mDevice->DeviceName = name;
}
/* Starts the sndio device and launches the record thread.
 *
 * Throws al::backend_exception if the device fails to start or the thread
 * can't be created (the device is stopped again in the latter case).
 */
void SndioCapture::start()
{
    if(!sio_start(mSndHandle))
        throw al::backend_exception{al::backend_error::DeviceError, "Error starting capture"};

    try {
        mKillNow.store(false, std::memory_order_release);
        mThread = std::thread{std::mem_fn(&SndioCapture::recordProc), this};
    }
    catch(std::exception& e) {
        sio_stop(mSndHandle);
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start capture thread: %s", e.what()};
    }
}
/* Signals the record thread to exit, joins it, then stops the device.
 * No-op if capture was already stopped or the thread isn't running.
 */
void SndioCapture::stop()
{
    if(mKillNow.exchange(true, std::memory_order_acq_rel) || !mThread.joinable())
        return;
    mThread.join();

    if(!sio_stop(mSndHandle))
        ERR("Error stopping device\n");
}
/* Copies up to `samples` frames from the ring buffer into `buffer`. */
void SndioCapture::captureSamples(al::byte *buffer, uint samples)
{ mRing->read(buffer, samples); }
/* Returns the number of captured frames waiting in the ring buffer. */
uint SndioCapture::availableSamples()
{ return static_cast<uint>(mRing->readSpace()); }
} // namespace
/* Returns the singleton sndio backend factory instance. */
BackendFactory &SndIOBackendFactory::getFactory()
{
    static SndIOBackendFactory factory{};
    return factory;
}
/* sndio needs no global initialization or dynamic loading. */
bool SndIOBackendFactory::init()
{ return true; }
/* The sndio backend supports both playback and capture. */
bool SndIOBackendFactory::querySupport(BackendType type)
{ return (type == BackendType::Playback || type == BackendType::Capture); }
/* Returns the null-separated device name list: the single default sndio
 * device for either playback or capture, empty otherwise.
 */
std::string SndIOBackendFactory::probe(BackendType type)
{
    std::string outnames;
    if(type == BackendType::Playback || type == BackendType::Capture)
    {
        /* The name is appended with its null terminator included, as the
         * list format requires null-separated entries.
         */
        outnames.append(sndio_device, sizeof(sndio_device));
    }
    return outnames;
}
/* Creates a playback or capture sndio backend for the given device, or
 * nullptr for unsupported backend types.
 */
BackendPtr SndIOBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type == BackendType::Playback)
        return BackendPtr{new SndioPlayback{device}};
    if(type == BackendType::Capture)
        return BackendPtr{new SndioCapture{device}};
    return nullptr;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_SNDIO_H
#define BACKENDS_SNDIO_H
#include "backends/base.h"
/* Factory for the sndio playback/capture backends. */
struct SndIOBackendFactory final : public BackendFactory {
public:
    bool init() override;

    bool querySupport(BackendType type) override;

    std::string probe(BackendType type) override;

    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Singleton accessor. */
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_SNDIO_H */

View file

@ -1,360 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <unistd.h>
#include <errno.h>
#include <math.h>
#include "alMain.h"
#include "alu.h"
#include "alconfig.h"
#include "threads.h"
#include "compat.h"
#include "backends/base.h"
#include <sys/audioio.h>
typedef struct ALCsolarisBackend {
DERIVE_FROM_TYPE(ALCbackend);
int fd;
ALubyte *mix_data;
int data_size;
ATOMIC(ALenum) killNow;
althrd_t thread;
} ALCsolarisBackend;
static int ALCsolarisBackend_mixerProc(void *ptr);
static void ALCsolarisBackend_Construct(ALCsolarisBackend *self, ALCdevice *device);
static void ALCsolarisBackend_Destruct(ALCsolarisBackend *self);
static ALCenum ALCsolarisBackend_open(ALCsolarisBackend *self, const ALCchar *name);
static ALCboolean ALCsolarisBackend_reset(ALCsolarisBackend *self);
static ALCboolean ALCsolarisBackend_start(ALCsolarisBackend *self);
static void ALCsolarisBackend_stop(ALCsolarisBackend *self);
static DECLARE_FORWARD2(ALCsolarisBackend, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCsolarisBackend, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCsolarisBackend, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCsolarisBackend, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCsolarisBackend, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCsolarisBackend)
DEFINE_ALCBACKEND_VTABLE(ALCsolarisBackend);
static const ALCchar solaris_device[] = "Solaris Default";
static const char *solaris_driver = "/dev/audio";
static void ALCsolarisBackend_Construct(ALCsolarisBackend *self, ALCdevice *device)
{
ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
SET_VTABLE2(ALCsolarisBackend, ALCbackend, self);
self->fd = -1;
self->mix_data = NULL;
ATOMIC_INIT(&self->killNow, AL_FALSE);
}
static void ALCsolarisBackend_Destruct(ALCsolarisBackend *self)
{
if(self->fd != -1)
close(self->fd);
self->fd = -1;
free(self->mix_data);
self->mix_data = NULL;
self->data_size = 0;
ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
static int ALCsolarisBackend_mixerProc(void *ptr)
{
ALCsolarisBackend *self = ptr;
ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
struct timeval timeout;
ALubyte *write_ptr;
ALint frame_size;
ALint to_write;
ssize_t wrote;
fd_set wfds;
int sret;
SetRTPriority();
althrd_setname(althrd_current(), MIXER_THREAD_NAME);
frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
ALCsolarisBackend_lock(self);
while(!ATOMIC_LOAD(&self->killNow, almemory_order_acquire) &&
ATOMIC_LOAD(&device->Connected, almemory_order_acquire))
{
FD_ZERO(&wfds);
FD_SET(self->fd, &wfds);
timeout.tv_sec = 1;
timeout.tv_usec = 0;
ALCsolarisBackend_unlock(self);
sret = select(self->fd+1, NULL, &wfds, NULL, &timeout);
ALCsolarisBackend_lock(self);
if(sret < 0)
{
if(errno == EINTR)
continue;
ERR("select failed: %s\n", strerror(errno));
aluHandleDisconnect(device, "Failed to wait for playback buffer: %s", strerror(errno));
break;
}
else if(sret == 0)
{
WARN("select timeout\n");
continue;
}
write_ptr = self->mix_data;
to_write = self->data_size;
aluMixData(device, write_ptr, to_write/frame_size);
while(to_write > 0 && !ATOMIC_LOAD_SEQ(&self->killNow))
{
wrote = write(self->fd, write_ptr, to_write);
if(wrote < 0)
{
if(errno == EAGAIN || errno == EWOULDBLOCK || errno == EINTR)
continue;
ERR("write failed: %s\n", strerror(errno));
aluHandleDisconnect(device, "Failed to write playback samples: %s",
strerror(errno));
break;
}
to_write -= wrote;
write_ptr += wrote;
}
}
ALCsolarisBackend_unlock(self);
return 0;
}
static ALCenum ALCsolarisBackend_open(ALCsolarisBackend *self, const ALCchar *name)
{
ALCdevice *device;
if(!name)
name = solaris_device;
else if(strcmp(name, solaris_device) != 0)
return ALC_INVALID_VALUE;
self->fd = open(solaris_driver, O_WRONLY);
if(self->fd == -1)
{
ERR("Could not open %s: %s\n", solaris_driver, strerror(errno));
return ALC_INVALID_VALUE;
}
device = STATIC_CAST(ALCbackend,self)->mDevice;
alstr_copy_cstr(&device->DeviceName, name);
return ALC_NO_ERROR;
}
static ALCboolean ALCsolarisBackend_reset(ALCsolarisBackend *self)
{
ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice;
audio_info_t info;
ALsizei frameSize;
ALsizei numChannels;
AUDIO_INITINFO(&info);
info.play.sample_rate = device->Frequency;
if(device->FmtChans != DevFmtMono)
device->FmtChans = DevFmtStereo;
numChannels = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
info.play.channels = numChannels;
switch(device->FmtType)
{
case DevFmtByte:
info.play.precision = 8;
info.play.encoding = AUDIO_ENCODING_LINEAR;
break;
case DevFmtUByte:
info.play.precision = 8;
info.play.encoding = AUDIO_ENCODING_LINEAR8;
break;
case DevFmtUShort:
case DevFmtInt:
case DevFmtUInt:
case DevFmtFloat:
device->FmtType = DevFmtShort;
/* fall-through */
case DevFmtShort:
info.play.precision = 16;
info.play.encoding = AUDIO_ENCODING_LINEAR;
break;
}
frameSize = numChannels * BytesFromDevFmt(device->FmtType);
info.play.buffer_size = device->UpdateSize*device->NumUpdates * frameSize;
if(ioctl(self->fd, AUDIO_SETINFO, &info) < 0)
{
ERR("ioctl failed: %s\n", strerror(errno));
return ALC_FALSE;
}
if(ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder) != (ALsizei)info.play.channels)
{
ERR("Failed to set %s, got %u channels instead\n", DevFmtChannelsString(device->FmtChans), info.play.channels);
return ALC_FALSE;
}
if(!((info.play.precision == 8 && info.play.encoding == AUDIO_ENCODING_LINEAR8 && device->FmtType == DevFmtUByte) ||
(info.play.precision == 8 && info.play.encoding == AUDIO_ENCODING_LINEAR && device->FmtType == DevFmtByte) ||
(info.play.precision == 16 && info.play.encoding == AUDIO_ENCODING_LINEAR && device->FmtType == DevFmtShort) ||
(info.play.precision == 32 && info.play.encoding == AUDIO_ENCODING_LINEAR && device->FmtType == DevFmtInt)))
{
ERR("Could not set %s samples, got %d (0x%x)\n", DevFmtTypeString(device->FmtType),
info.play.precision, info.play.encoding);
return ALC_FALSE;
}
device->Frequency = info.play.sample_rate;
device->UpdateSize = (info.play.buffer_size/device->NumUpdates) + 1;
SetDefaultChannelOrder(device);
free(self->mix_data);
self->data_size = device->UpdateSize * FrameSizeFromDevFmt(
device->FmtChans, device->FmtType, device->AmbiOrder
);
self->mix_data = calloc(1, self->data_size);
return ALC_TRUE;
}
static ALCboolean ALCsolarisBackend_start(ALCsolarisBackend *self)
{
ATOMIC_STORE_SEQ(&self->killNow, AL_FALSE);
if(althrd_create(&self->thread, ALCsolarisBackend_mixerProc, self) != althrd_success)
return ALC_FALSE;
return ALC_TRUE;
}
static void ALCsolarisBackend_stop(ALCsolarisBackend *self)
{
int res;
if(ATOMIC_EXCHANGE_SEQ(&self->killNow, AL_TRUE))
return;
althrd_join(self->thread, &res);
if(ioctl(self->fd, AUDIO_DRAIN) < 0)
ERR("Error draining device: %s\n", strerror(errno));
}
typedef struct ALCsolarisBackendFactory {
DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCsolarisBackendFactory;
#define ALCSOLARISBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCsolarisBackendFactory, ALCbackendFactory) } }
ALCbackendFactory *ALCsolarisBackendFactory_getFactory(void);
static ALCboolean ALCsolarisBackendFactory_init(ALCsolarisBackendFactory *self);
static DECLARE_FORWARD(ALCsolarisBackendFactory, ALCbackendFactory, void, deinit)
static ALCboolean ALCsolarisBackendFactory_querySupport(ALCsolarisBackendFactory *self, ALCbackend_Type type);
static void ALCsolarisBackendFactory_probe(ALCsolarisBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCsolarisBackendFactory_createBackend(ALCsolarisBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCsolarisBackendFactory);
ALCbackendFactory *ALCsolarisBackendFactory_getFactory(void)
{
static ALCsolarisBackendFactory factory = ALCSOLARISBACKENDFACTORY_INITIALIZER;
return STATIC_CAST(ALCbackendFactory, &factory);
}
static ALCboolean ALCsolarisBackendFactory_init(ALCsolarisBackendFactory* UNUSED(self))
{
ConfigValueStr(NULL, "solaris", "device", &solaris_driver);
return ALC_TRUE;
}
static ALCboolean ALCsolarisBackendFactory_querySupport(ALCsolarisBackendFactory* UNUSED(self), ALCbackend_Type type)
{
if(type == ALCbackend_Playback)
return ALC_TRUE;
return ALC_FALSE;
}
static void ALCsolarisBackendFactory_probe(ALCsolarisBackendFactory* UNUSED(self), enum DevProbe type)
{
switch(type)
{
case ALL_DEVICE_PROBE:
{
#ifdef HAVE_STAT
struct stat buf;
if(stat(solaris_driver, &buf) == 0)
#endif
AppendAllDevicesList(solaris_device);
}
break;
case CAPTURE_DEVICE_PROBE:
break;
}
}
ALCbackend* ALCsolarisBackendFactory_createBackend(ALCsolarisBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
if(type == ALCbackend_Playback)
{
ALCsolarisBackend *backend;
NEW_OBJ(backend, ALCsolarisBackend)(device);
if(!backend) return NULL;
return STATIC_CAST(ALCbackend, backend);
}
return NULL;
}

View file

@ -0,0 +1,294 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/solaris.h"
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <unistd.h>
#include <errno.h>
#include <poll.h>
#include <math.h>
#include <thread>
#include <functional>
#include "alcmain.h"
#include "albyte.h"
#include "alu.h"
#include "alconfig.h"
#include "compat.h"
#include "core/logging.h"
#include "threads.h"
#include "vector.h"
#include <sys/audioio.h>
namespace {
/* The single exposed device name, and the configurable driver node path. */
constexpr char solaris_device[] = "Solaris Default";
std::string solaris_driver{"/dev/audio"};
/* Playback backend that writes mixed samples to a Solaris audio device node. */
struct SolarisBackend final : public BackendBase {
SolarisBackend(ALCdevice *device) noexcept : BackendBase{device} { }
~SolarisBackend() override;
/* Mixer thread entry point; renders and writes audio until told to stop. */
int mixerProc();
void open(const char *name) override;
bool reset() override;
void start() override;
void stop() override;
/* File descriptor of the opened audio device; -1 when closed. */
int mFd{-1};
/* Staging buffer holding one update's worth of rendered samples. */
al::vector<al::byte> mBuffer;
/* Set to request mixer-thread shutdown; starts true (thread not running). */
std::atomic<bool> mKillNow{true};
std::thread mThread;
DEF_NEWDEL(SolarisBackend)
};
/* Close the audio device descriptor if it is still open. */
SolarisBackend::~SolarisBackend()
{
    const int fd{mFd};
    mFd = -1;
    if(fd != -1)
        close(fd);
}
/* Mixer thread body: waits for the device to become writable, renders one
 * buffer of samples, and writes it out until shutdown or disconnect. */
int SolarisBackend::mixerProc()
{
SetRTPriority();
althrd_setname(MIXER_THREAD_NAME);
const size_t frame_step{mDevice->channelsFromFmt()};
const uint frame_size{mDevice->frameSizeFromFmt()};
while(!mKillNow.load(std::memory_order_acquire)
&& mDevice->Connected.load(std::memory_order_acquire))
{
/* Wait (up to 1 second) until the device can accept more data. */
pollfd pollitem{};
pollitem.fd = mFd;
pollitem.events = POLLOUT;
int pret{poll(&pollitem, 1, 1000)};
if(pret < 0)
{
/* Interrupted polls are retried; other errors disconnect the device. */
if(errno == EINTR || errno == EAGAIN)
continue;
ERR("poll failed: %s\n", strerror(errno));
mDevice->handleDisconnect("Failed to wait for playback buffer: %s", strerror(errno));
break;
}
else if(pret == 0)
{
WARN("poll timeout\n");
continue;
}
/* Render one full buffer, then write it out in as many chunks as needed. */
al::byte *write_ptr{mBuffer.data()};
size_t to_write{mBuffer.size()};
mDevice->renderSamples(write_ptr, static_cast<uint>(to_write/frame_size), frame_step);
while(to_write > 0 && !mKillNow.load(std::memory_order_acquire))
{
ssize_t wrote{write(mFd, write_ptr, to_write)};
if(wrote < 0)
{
/* Transient errors: retry the write. */
if(errno == EAGAIN || errno == EWOULDBLOCK || errno == EINTR)
continue;
ERR("write failed: %s\n", strerror(errno));
mDevice->handleDisconnect("Failed to write playback samples: %s", strerror(errno));
break;
}
to_write -= static_cast<size_t>(wrote);
write_ptr += wrote;
}
}
return 0;
}
/* Open the configured audio device node for playback.
 * Only the single "Solaris Default" device name is accepted. */
void SolarisBackend::open(const char *name)
{
if(!name)
name = solaris_device;
else if(strcmp(name, solaris_device) != 0)
throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
name};
mFd = ::open(solaris_driver.c_str(), O_WRONLY);
if(mFd == -1)
throw al::backend_exception{al::backend_error::NoDevice, "Could not open %s: %s",
solaris_driver.c_str(), strerror(errno)};
mDevice->DeviceName = name;
}
/* Negotiate the playback format with the device via AUDIO_SETINFO, then adopt
 * whatever rate/buffering the device actually accepted. */
bool SolarisBackend::reset()
{
audio_info_t info;
AUDIO_INITINFO(&info);
info.play.sample_rate = mDevice->Frequency;
/* Only mono and stereo output are attempted here. */
if(mDevice->FmtChans != DevFmtMono)
mDevice->FmtChans = DevFmtStereo;
uint numChannels{mDevice->channelsFromFmt()};
info.play.channels = numChannels;
switch(mDevice->FmtType)
{
case DevFmtByte:
info.play.precision = 8;
info.play.encoding = AUDIO_ENCODING_LINEAR;
break;
case DevFmtUByte:
info.play.precision = 8;
info.play.encoding = AUDIO_ENCODING_LINEAR8;
break;
case DevFmtUShort:
case DevFmtInt:
case DevFmtUInt:
case DevFmtFloat:
/* Sample types not requested from the device are coerced to signed 16-bit. */
mDevice->FmtType = DevFmtShort;
/* fall-through */
case DevFmtShort:
info.play.precision = 16;
info.play.encoding = AUDIO_ENCODING_LINEAR;
break;
}
uint frameSize{numChannels * mDevice->bytesFromFmt()};
info.play.buffer_size = mDevice->BufferSize * frameSize;
if(ioctl(mFd, AUDIO_SETINFO, &info) < 0)
{
ERR("ioctl failed: %s\n", strerror(errno));
return false;
}
/* Verify the device accepted the requested channel count and sample type. */
if(mDevice->channelsFromFmt() != info.play.channels)
{
ERR("Failed to set %s, got %u channels instead\n", DevFmtChannelsString(mDevice->FmtChans),
info.play.channels);
return false;
}
if(!((info.play.precision == 8 && info.play.encoding == AUDIO_ENCODING_LINEAR8 && mDevice->FmtType == DevFmtUByte) ||
(info.play.precision == 8 && info.play.encoding == AUDIO_ENCODING_LINEAR && mDevice->FmtType == DevFmtByte) ||
(info.play.precision == 16 && info.play.encoding == AUDIO_ENCODING_LINEAR && mDevice->FmtType == DevFmtShort) ||
(info.play.precision == 32 && info.play.encoding == AUDIO_ENCODING_LINEAR && mDevice->FmtType == DevFmtInt)))
{
ERR("Could not set %s samples, got %d (0x%x)\n", DevFmtTypeString(mDevice->FmtType),
info.play.precision, info.play.encoding);
return false;
}
/* Adopt the device's actual parameters; one update is half the device buffer. */
mDevice->Frequency = info.play.sample_rate;
mDevice->BufferSize = info.play.buffer_size / frameSize;
mDevice->UpdateSize = mDevice->BufferSize / 2;
setDefaultChannelOrder();
/* Size and silence the staging buffer for one update. */
mBuffer.resize(mDevice->UpdateSize * mDevice->frameSizeFromFmt());
std::fill(mBuffer.begin(), mBuffer.end(), al::byte{});
return true;
}
void SolarisBackend::start()
{
try {
mKillNow.store(false, std::memory_order_release);
mThread = std::thread{std::mem_fn(&SolarisBackend::mixerProc), this};
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start mixing thread: %s", e.what()};
}
}
/* Signal the mixer thread to exit, join it, then drain queued audio. */
void SolarisBackend::stop()
{
/* Already stopped (or never started) if the kill flag was set or no thread. */
if(mKillNow.exchange(true, std::memory_order_acq_rel) || !mThread.joinable())
return;
mThread.join();
if(ioctl(mFd, AUDIO_DRAIN) < 0)
ERR("Error draining device: %s\n", strerror(errno));
}
} // namespace
/* Meyers-singleton accessor for the Solaris backend factory. */
BackendFactory &SolarisBackendFactory::getFactory()
{
    static SolarisBackendFactory sFactory{};
    return sFactory;
}
/* Read the optional [solaris] device=... config entry for the driver path. */
bool SolarisBackendFactory::init()
{
if(auto devopt = ConfigValueStr(nullptr, "solaris", "device"))
solaris_driver = std::move(*devopt);
return true;
}
/* Only playback is supported by this backend. */
bool SolarisBackendFactory::querySupport(BackendType type)
{
    if(type == BackendType::Playback)
        return true;
    return false;
}
/* Build the name list for the requested device type. */
std::string SolarisBackendFactory::probe(BackendType type)
{
std::string outnames;
switch(type)
{
case BackendType::Playback:
{
/* Only report the device if the driver node exists. */
struct stat buf;
if(stat(solaris_driver.c_str(), &buf) == 0)
/* sizeof deliberately includes the string's null terminator. */
outnames.append(solaris_device, sizeof(solaris_device));
}
break;
case BackendType::Capture:
break;
}
return outnames;
}
/* Create a playback backend instance; other types are unsupported. */
BackendPtr SolarisBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type != BackendType::Playback)
        return nullptr;
    return BackendPtr{new SolarisBackend{device}};
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_SOLARIS_H
#define BACKENDS_SOLARIS_H
#include "backends/base.h"
/* Factory that creates Solaris playback backends. */
struct SolarisBackendFactory final : public BackendFactory {
public:
bool init() override;
bool querySupport(BackendType type) override;
std::string probe(BackendType type) override;
BackendPtr createBackend(ALCdevice *device, BackendType type) override;
/* Singleton accessor. */
static BackendFactory &getFactory();
};
#endif /* BACKENDS_SOLARIS_H */

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_WASAPI_H
#define BACKENDS_WASAPI_H
#include "backends/base.h"
/* Factory that creates WASAPI backends. */
struct WasapiBackendFactory final : public BackendFactory {
public:
bool init() override;
bool querySupport(BackendType type) override;
std::string probe(BackendType type) override;
BackendPtr createBackend(ALCdevice *device, BackendType type) override;
/* Singleton accessor. */
static BackendFactory &getFactory();
};
#endif /* BACKENDS_WASAPI_H */

View file

@ -1,453 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <errno.h>
#include "alMain.h"
#include "alu.h"
#include "alconfig.h"
#include "threads.h"
#include "compat.h"
#include "backends/base.h"
/* Name of the lone "device" this backend exposes. */
static const ALCchar waveDevice[] = "Wave File Writer";
/* 16-byte sub-type identifiers written into the WAVE header's extensible
 * chunk: PCM vs IEEE-float, in plain and B-Format (.amb) variants. */
static const ALubyte SUBTYPE_PCM[] = {
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xaa,
0x00, 0x38, 0x9b, 0x71
};
static const ALubyte SUBTYPE_FLOAT[] = {
0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xaa,
0x00, 0x38, 0x9b, 0x71
};
static const ALubyte SUBTYPE_BFORMAT_PCM[] = {
0x01, 0x00, 0x00, 0x00, 0x21, 0x07, 0xd3, 0x11, 0x86, 0x44, 0xc8, 0xc1,
0xca, 0x00, 0x00, 0x00
};
static const ALubyte SUBTYPE_BFORMAT_FLOAT[] = {
0x03, 0x00, 0x00, 0x00, 0x21, 0x07, 0xd3, 0x11, 0x86, 0x44, 0xc8, 0xc1,
0xca, 0x00, 0x00, 0x00
};
/* Write a 16-bit value to the file in little-endian byte order. */
static void fwrite16le(ALushort val, FILE *f)
{
    ALubyte data[2];
    data[0] = val&0xff;
    data[1] = (val>>8)&0xff;
    fwrite(data, 1, sizeof(data), f);
}
/* Write a 32-bit value to the file in little-endian byte order. */
static void fwrite32le(ALuint val, FILE *f)
{
    ALubyte data[4];
    data[0] = val&0xff;
    data[1] = (val>>8)&0xff;
    data[2] = (val>>16)&0xff;
    data[3] = (val>>24)&0xff;
    fwrite(data, 1, sizeof(data), f);
}
/* Backend state for writing mixed output to a RIFF WAVE file. */
typedef struct ALCwaveBackend {
DERIVE_FROM_TYPE(ALCbackend);
/* Output file handle, and the file offset where sample data begins. */
FILE *mFile;
long mDataStart;
/* Mix staging buffer of mSize bytes (one update's worth; see start()). */
ALvoid *mBuffer;
ALuint mSize;
/* Set to request mixer-thread shutdown. */
ATOMIC(ALenum) killNow;
althrd_t thread;
} ALCwaveBackend;
/* Forward declarations and vtable wiring for the wave-writer backend. */
static int ALCwaveBackend_mixerProc(void *ptr);
static void ALCwaveBackend_Construct(ALCwaveBackend *self, ALCdevice *device);
static void ALCwaveBackend_Destruct(ALCwaveBackend *self);
static ALCenum ALCwaveBackend_open(ALCwaveBackend *self, const ALCchar *name);
static ALCboolean ALCwaveBackend_reset(ALCwaveBackend *self);
static ALCboolean ALCwaveBackend_start(ALCwaveBackend *self);
static void ALCwaveBackend_stop(ALCwaveBackend *self);
static DECLARE_FORWARD2(ALCwaveBackend, ALCbackend, ALCenum, captureSamples, void*, ALCuint)
static DECLARE_FORWARD(ALCwaveBackend, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCwaveBackend, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCwaveBackend, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCwaveBackend, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCwaveBackend)
DEFINE_ALCBACKEND_VTABLE(ALCwaveBackend);
/* Initialize base backend state and this backend's members. */
static void ALCwaveBackend_Construct(ALCwaveBackend *self, ALCdevice *device)
{
ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
SET_VTABLE2(ALCwaveBackend, ALCbackend, self);
self->mFile = NULL;
self->mDataStart = -1;
self->mBuffer = NULL;
self->mSize = 0;
/* Start with the kill flag set: the mixer thread is not running. */
ATOMIC_INIT(&self->killNow, AL_TRUE);
}
/* Close the output file (if open) and destroy the base backend. */
static void ALCwaveBackend_Destruct(ALCwaveBackend *self)
{
    FILE *file = self->mFile;
    self->mFile = NULL;
    if(file != NULL)
        fclose(file);
    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* Mixer thread body: paces itself against the wall clock, rendering and
 * writing UpdateSize sample frames at a time to the output file. */
static int ALCwaveBackend_mixerProc(void *ptr)
{
ALCwaveBackend *self = (ALCwaveBackend*)ptr;
ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
struct timespec now, start;
ALint64 avail, done;
ALuint frameSize;
size_t fs;
/* Sleep half an update period when there's nothing to do yet. */
const long restTime = (long)((ALuint64)device->UpdateSize * 1000000000 /
device->Frequency / 2);
althrd_setname(althrd_current(), MIXER_THREAD_NAME);
frameSize = FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
done = 0;
if(altimespec_get(&start, AL_TIME_UTC) != AL_TIME_UTC)
{
ERR("Failed to get starting time\n");
return 1;
}
while(!ATOMIC_LOAD(&self->killNow, almemory_order_acquire) &&
ATOMIC_LOAD(&device->Connected, almemory_order_acquire))
{
if(altimespec_get(&now, AL_TIME_UTC) != AL_TIME_UTC)
{
ERR("Failed to get current time\n");
return 1;
}
/* Total samples that should have been produced since the start time. */
avail = (now.tv_sec - start.tv_sec) * device->Frequency;
avail += (ALint64)(now.tv_nsec - start.tv_nsec) * device->Frequency / 1000000000;
if(avail < done)
{
/* Oops, time skipped backwards. Reset the number of samples done
* with one update available since we (likely) just came back from
* sleeping. */
done = avail - device->UpdateSize;
}
if(avail-done < device->UpdateSize)
al_nssleep(restTime);
else while(avail-done >= device->UpdateSize)
{
ALCwaveBackend_lock(self);
aluMixData(device, self->mBuffer, device->UpdateSize);
ALCwaveBackend_unlock(self);
done += device->UpdateSize;
/* The file stores little-endian samples; byte-swap on big-endian hosts. */
if(!IS_LITTLE_ENDIAN)
{
ALuint bytesize = BytesFromDevFmt(device->FmtType);
ALuint i;
if(bytesize == 2)
{
ALushort *samples = self->mBuffer;
ALuint len = self->mSize / 2;
for(i = 0;i < len;i++)
{
ALushort samp = samples[i];
samples[i] = (samp>>8) | (samp<<8);
}
}
else if(bytesize == 4)
{
ALuint *samples = self->mBuffer;
ALuint len = self->mSize / 4;
for(i = 0;i < len;i++)
{
ALuint samp = samples[i];
samples[i] = (samp>>24) | ((samp>>8)&0x0000ff00) |
((samp<<8)&0x00ff0000) | (samp<<24);
}
}
}
fs = fwrite(self->mBuffer, frameSize, device->UpdateSize, self->mFile);
(void)fs;
if(ferror(self->mFile))
{
ERR("Error writing to file\n");
ALCdevice_Lock(device);
aluHandleDisconnect(device, "Failed to write playback samples");
ALCdevice_Unlock(device);
break;
}
}
}
return 0;
}
/* Open the configured output file for writing.
 * Fails unless a [wave] file=... config entry is present. */
static ALCenum ALCwaveBackend_open(ALCwaveBackend *self, const ALCchar *name)
{
ALCdevice *device;
const char *fname;
fname = GetConfigValue(NULL, "wave", "file", "");
if(!fname[0]) return ALC_INVALID_VALUE;
/* Only the single wave-writer device name is accepted. */
if(!name)
name = waveDevice;
else if(strcmp(name, waveDevice) != 0)
return ALC_INVALID_VALUE;
self->mFile = al_fopen(fname, "wb");
if(!self->mFile)
{
ERR("Could not open file '%s': %s\n", fname, strerror(errno));
return ALC_INVALID_VALUE;
}
device = STATIC_CAST(ALCbackend, self)->mDevice;
alstr_copy_cstr(&device->DeviceName, name);
return ALC_NO_ERROR;
}
/* Rewrite the RIFF/WAVE header to match the (possibly coerced) device format.
 * The RIFF and data chunk lengths are placeholders until stop() fills them. */
static ALCboolean ALCwaveBackend_reset(ALCwaveBackend *self)
{
ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
ALuint channels=0, bits=0, chanmask=0;
int isbformat = 0;
size_t val;
fseek(self->mFile, 0, SEEK_SET);
clearerr(self->mFile);
/* Optional [wave] bformat=1 switches output to first-order ambisonics. */
if(GetConfigValueBool(NULL, "wave", "bformat", 0))
{
device->FmtChans = DevFmtAmbi3D;
device->AmbiOrder = 1;
}
/* Map sample types not written by this backend to supported equivalents. */
switch(device->FmtType)
{
case DevFmtByte:
device->FmtType = DevFmtUByte;
break;
case DevFmtUShort:
device->FmtType = DevFmtShort;
break;
case DevFmtUInt:
device->FmtType = DevFmtInt;
break;
case DevFmtUByte:
case DevFmtShort:
case DevFmtInt:
case DevFmtFloat:
break;
}
/* Speaker-position mask for the extensible header. */
switch(device->FmtChans)
{
case DevFmtMono: chanmask = 0x04; break;
case DevFmtStereo: chanmask = 0x01 | 0x02; break;
case DevFmtQuad: chanmask = 0x01 | 0x02 | 0x10 | 0x20; break;
case DevFmtX51: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x200 | 0x400; break;
case DevFmtX51Rear: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x010 | 0x020; break;
case DevFmtX61: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x100 | 0x200 | 0x400; break;
case DevFmtX71: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x010 | 0x020 | 0x200 | 0x400; break;
case DevFmtAmbi3D:
/* .amb output requires FuMa */
device->AmbiLayout = AmbiLayout_FuMa;
device->AmbiScale = AmbiNorm_FuMa;
isbformat = 1;
chanmask = 0;
break;
}
bits = BytesFromDevFmt(device->FmtType) * 8;
channels = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
fputs("RIFF", self->mFile);
fwrite32le(0xFFFFFFFF, self->mFile); // 'RIFF' header len; filled in at close
fputs("WAVE", self->mFile);
fputs("fmt ", self->mFile);
fwrite32le(40, self->mFile); // 'fmt ' header len; 40 bytes for EXTENSIBLE
// 16-bit val, format type id (extensible: 0xFFFE)
fwrite16le(0xFFFE, self->mFile);
// 16-bit val, channel count
fwrite16le(channels, self->mFile);
// 32-bit val, frequency
fwrite32le(device->Frequency, self->mFile);
// 32-bit val, bytes per second
fwrite32le(device->Frequency * channels * bits / 8, self->mFile);
// 16-bit val, frame size
fwrite16le(channels * bits / 8, self->mFile);
// 16-bit val, bits per sample
fwrite16le(bits, self->mFile);
// 16-bit val, extra byte count
fwrite16le(22, self->mFile);
// 16-bit val, valid bits per sample
fwrite16le(bits, self->mFile);
// 32-bit val, channel mask
fwrite32le(chanmask, self->mFile);
// 16 byte GUID, sub-type format
val = fwrite((device->FmtType == DevFmtFloat) ?
(isbformat ? SUBTYPE_BFORMAT_FLOAT : SUBTYPE_FLOAT) :
(isbformat ? SUBTYPE_BFORMAT_PCM : SUBTYPE_PCM), 1, 16, self->mFile);
(void)val;
fputs("data", self->mFile);
fwrite32le(0xFFFFFFFF, self->mFile); // 'data' header len; filled in at close
if(ferror(self->mFile))
{
ERR("Error writing header: %s\n", strerror(errno));
return ALC_FALSE;
}
/* Remember where sample data begins so stop() can back-fill chunk sizes. */
self->mDataStart = ftell(self->mFile);
SetDefaultWFXChannelOrder(device);
return ALC_TRUE;
}
/* Allocate the mix buffer (one update's worth) and launch the mixer thread. */
static ALCboolean ALCwaveBackend_start(ALCwaveBackend *self)
{
ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
self->mSize = device->UpdateSize * FrameSizeFromDevFmt(
device->FmtChans, device->FmtType, device->AmbiOrder
);
self->mBuffer = malloc(self->mSize);
if(!self->mBuffer)
{
ERR("Buffer malloc failed\n");
return ALC_FALSE;
}
ATOMIC_STORE(&self->killNow, AL_FALSE, almemory_order_release);
if(althrd_create(&self->thread, ALCwaveBackend_mixerProc, self) != althrd_success)
{
/* Thread creation failed; release the buffer and report failure. */
free(self->mBuffer);
self->mBuffer = NULL;
self->mSize = 0;
return ALC_FALSE;
}
return ALC_TRUE;
}
/* Stop the mixer thread and back-fill the RIFF/data chunk sizes in the header. */
static void ALCwaveBackend_stop(ALCwaveBackend *self)
{
ALuint dataLen;
long size;
int res;
/* Already stopped if the kill flag was set. */
if(ATOMIC_EXCHANGE(&self->killNow, AL_TRUE, almemory_order_acq_rel))
return;
althrd_join(self->thread, &res);
free(self->mBuffer);
self->mBuffer = NULL;
size = ftell(self->mFile);
if(size > 0)
{
/* Replace the placeholder lengths written by reset(). */
dataLen = size - self->mDataStart;
if(fseek(self->mFile, self->mDataStart-4, SEEK_SET) == 0)
fwrite32le(dataLen, self->mFile); // 'data' header len
if(fseek(self->mFile, 4, SEEK_SET) == 0)
fwrite32le(size-8, self->mFile); // 'WAVE' header len
}
}
/* Factory object for the wave-writer backend (legacy C vtable style). */
typedef struct ALCwaveBackendFactory {
DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCwaveBackendFactory;
#define ALCWAVEBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCwaveBackendFactory, ALCbackendFactory) } }
/* Forward declarations for the factory's vtable entry points. */
ALCbackendFactory *ALCwaveBackendFactory_getFactory(void);
static ALCboolean ALCwaveBackendFactory_init(ALCwaveBackendFactory *self);
static DECLARE_FORWARD(ALCwaveBackendFactory, ALCbackendFactory, void, deinit)
static ALCboolean ALCwaveBackendFactory_querySupport(ALCwaveBackendFactory *self, ALCbackend_Type type);
static void ALCwaveBackendFactory_probe(ALCwaveBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCwaveBackendFactory_createBackend(ALCwaveBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCwaveBackendFactory);
/* Singleton accessor for the factory. */
ALCbackendFactory *ALCwaveBackendFactory_getFactory(void)
{
static ALCwaveBackendFactory factory = ALCWAVEBACKENDFACTORY_INITIALIZER;
return STATIC_CAST(ALCbackendFactory, &factory);
}
/* No setup is needed for the wave-writer factory. */
static ALCboolean ALCwaveBackendFactory_init(ALCwaveBackendFactory* UNUSED(self))
{ return ALC_TRUE; }
/* Playback is supported only when a [wave] file=... entry is configured. */
static ALCboolean ALCwaveBackendFactory_querySupport(ALCwaveBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    if(type != ALCbackend_Playback)
        return ALC_FALSE;
    return !!ConfigValueExists(NULL, "wave", "file");
}
/* Report the single wave-writer device for playback probes; no capture devices. */
static void ALCwaveBackendFactory_probe(ALCwaveBackendFactory* UNUSED(self), enum DevProbe type)
{
    if(type == ALL_DEVICE_PROBE)
        AppendAllDevicesList(waveDevice);
    /* CAPTURE_DEVICE_PROBE: nothing to report. */
}
/* Instantiate a wave-writer playback backend; any other type yields NULL. */
static ALCbackend* ALCwaveBackendFactory_createBackend(ALCwaveBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    ALCwaveBackend *playback;

    if(type != ALCbackend_Playback)
        return NULL;

    NEW_OBJ(playback, ALCwaveBackend)(device);
    if(!playback)
        return NULL;
    return STATIC_CAST(ALCbackend, playback);
}

View file

@ -0,0 +1,406 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/wave.h"
#include <algorithm>
#include <atomic>
#include <cerrno>
#include <chrono>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <exception>
#include <functional>
#include <thread>
#include "albit.h"
#include "albyte.h"
#include "alcmain.h"
#include "alconfig.h"
#include "almalloc.h"
#include "alnumeric.h"
#include "alu.h"
#include "compat.h"
#include "core/logging.h"
#include "opthelpers.h"
#include "strutils.h"
#include "threads.h"
#include "vector.h"
namespace {
using std::chrono::seconds;
using std::chrono::milliseconds;
using std::chrono::nanoseconds;
using ubyte = unsigned char;
using ushort = unsigned short;
/* Name of the lone "device" this backend exposes. */
constexpr char waveDevice[] = "Wave File Writer";
/* 16-byte sub-type identifiers written into the WAVE header's extensible
 * chunk: PCM vs IEEE-float, in plain and B-Format (.amb) variants. */
constexpr ubyte SUBTYPE_PCM[]{
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xaa,
0x00, 0x38, 0x9b, 0x71
};
constexpr ubyte SUBTYPE_FLOAT[]{
0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xaa,
0x00, 0x38, 0x9b, 0x71
};
constexpr ubyte SUBTYPE_BFORMAT_PCM[]{
0x01, 0x00, 0x00, 0x00, 0x21, 0x07, 0xd3, 0x11, 0x86, 0x44, 0xc8, 0xc1,
0xca, 0x00, 0x00, 0x00
};
constexpr ubyte SUBTYPE_BFORMAT_FLOAT[]{
0x03, 0x00, 0x00, 0x00, 0x21, 0x07, 0xd3, 0x11, 0x86, 0x44, 0xc8, 0xc1,
0xca, 0x00, 0x00, 0x00
};
/* Write a 16-bit value to the file in little-endian byte order. */
void fwrite16le(ushort val, FILE *f)
{
    const ubyte lo{static_cast<ubyte>(val&0xff)};
    const ubyte hi{static_cast<ubyte>((val>>8)&0xff)};
    const ubyte data[2]{lo, hi};
    fwrite(data, 1, sizeof(data), f);
}
/* Write a 32-bit value to the file in little-endian byte order. */
void fwrite32le(uint val, FILE *f)
{
    const ubyte data[4]{
        static_cast<ubyte>(val&0xff),
        static_cast<ubyte>((val>>8)&0xff),
        static_cast<ubyte>((val>>16)&0xff),
        static_cast<ubyte>((val>>24)&0xff)};
    fwrite(data, 1, sizeof(data), f);
}
/* Playback backend that writes mixed output to a RIFF WAVE file. */
struct WaveBackend final : public BackendBase {
WaveBackend(ALCdevice *device) noexcept : BackendBase{device} { }
~WaveBackend() override;
/* Mixer thread entry point; renders and writes audio until told to stop. */
int mixerProc();
void open(const char *name) override;
bool reset() override;
void start() override;
void stop() override;
/* Output file handle, and the file offset where sample data begins. */
FILE *mFile{nullptr};
long mDataStart{-1};
/* Staging buffer holding one update's worth of rendered samples. */
al::vector<al::byte> mBuffer;
/* Set to request mixer-thread shutdown; starts true (thread not running). */
std::atomic<bool> mKillNow{true};
std::thread mThread;
DEF_NEWDEL(WaveBackend)
};
/* Close the output file if it is still open. */
WaveBackend::~WaveBackend()
{
    FILE *file{mFile};
    mFile = nullptr;
    if(file)
        fclose(file);
}
/* Mixer thread body: paces itself against a steady clock, rendering and
 * writing UpdateSize frames at a time to the output file. */
int WaveBackend::mixerProc()
{
/* Sleep half an update period when there's nothing to do yet. */
const milliseconds restTime{mDevice->UpdateSize*1000/mDevice->Frequency / 2};
althrd_setname(MIXER_THREAD_NAME);
const size_t frameStep{mDevice->channelsFromFmt()};
const size_t frameSize{mDevice->frameSizeFromFmt()};
int64_t done{0};
auto start = std::chrono::steady_clock::now();
while(!mKillNow.load(std::memory_order_acquire) &&
mDevice->Connected.load(std::memory_order_acquire))
{
auto now = std::chrono::steady_clock::now();
/* This converts from nanoseconds to nanosamples, then to samples. */
int64_t avail{std::chrono::duration_cast<seconds>((now-start) *
mDevice->Frequency).count()};
if(avail-done < mDevice->UpdateSize)
{
std::this_thread::sleep_for(restTime);
continue;
}
while(avail-done >= mDevice->UpdateSize)
{
mDevice->renderSamples(mBuffer.data(), mDevice->UpdateSize, frameStep);
done += mDevice->UpdateSize;
/* The file stores little-endian samples; byte-swap on big-endian hosts. */
if_constexpr(al::endian::native != al::endian::little)
{
const uint bytesize{mDevice->bytesFromFmt()};
if(bytesize == 2)
{
ushort *samples = reinterpret_cast<ushort*>(mBuffer.data());
const size_t len{mBuffer.size() / 2};
for(size_t i{0};i < len;i++)
{
const ushort samp{samples[i]};
samples[i] = static_cast<ushort>((samp>>8) | (samp<<8));
}
}
else if(bytesize == 4)
{
uint *samples = reinterpret_cast<uint*>(mBuffer.data());
const size_t len{mBuffer.size() / 4};
for(size_t i{0};i < len;i++)
{
const uint samp{samples[i]};
samples[i] = (samp>>24) | ((samp>>8)&0x0000ff00) |
((samp<<8)&0x00ff0000) | (samp<<24);
}
}
}
const size_t fs{fwrite(mBuffer.data(), frameSize, mDevice->UpdateSize, mFile)};
if(fs < mDevice->UpdateSize || ferror(mFile))
{
ERR("Error writing to file\n");
mDevice->handleDisconnect("Failed to write playback samples");
break;
}
}
/* For every completed second, increment the start time and reduce the
* samples done. This prevents the difference between the start time
* and current time from growing too large, while maintaining the
* correct number of samples to render.
*/
if(done >= mDevice->Frequency)
{
seconds s{done/mDevice->Frequency};
done %= mDevice->Frequency;
start += s;
}
}
return 0;
}
/* Open the configured output file for writing.
 * Fails unless a [wave] file=... config entry is present. */
void WaveBackend::open(const char *name)
{
const char *fname{GetConfigValue(nullptr, "wave", "file", "")};
if(!fname[0]) throw al::backend_exception{al::backend_error::NoDevice,
"No wave output filename"};
/* Only the single wave-writer device name is accepted. */
if(!name)
name = waveDevice;
else if(strcmp(name, waveDevice) != 0)
throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
name};
#ifdef _WIN32
{
/* Windows: convert the UTF-8 config value for the wide-character fopen. */
std::wstring wname = utf8_to_wstr(fname);
mFile = _wfopen(wname.c_str(), L"wb");
}
#else
mFile = fopen(fname, "wb");
#endif
if(!mFile)
throw al::backend_exception{al::backend_error::DeviceError, "Could not open file '%s': %s",
fname, strerror(errno)};
mDevice->DeviceName = name;
}
/* Rewrite the RIFF/WAVE header to match the (possibly coerced) device format.
 * The RIFF and data chunk lengths are placeholders until stop() fills them. */
bool WaveBackend::reset()
{
uint channels{0}, bytes{0}, chanmask{0};
bool isbformat{false};
size_t val;
fseek(mFile, 0, SEEK_SET);
clearerr(mFile);
/* Optional [wave] bformat=1 switches output to ambisonic B-Format. */
if(GetConfigValueBool(nullptr, "wave", "bformat", 0))
{
mDevice->FmtChans = DevFmtAmbi3D;
mDevice->mAmbiOrder = 1;
}
/* Map sample types not written by this backend to supported equivalents. */
switch(mDevice->FmtType)
{
case DevFmtByte:
mDevice->FmtType = DevFmtUByte;
break;
case DevFmtUShort:
mDevice->FmtType = DevFmtShort;
break;
case DevFmtUInt:
mDevice->FmtType = DevFmtInt;
break;
case DevFmtUByte:
case DevFmtShort:
case DevFmtInt:
case DevFmtFloat:
break;
}
/* Speaker-position mask for the extensible header. */
switch(mDevice->FmtChans)
{
case DevFmtMono: chanmask = 0x04; break;
case DevFmtStereo: chanmask = 0x01 | 0x02; break;
case DevFmtQuad: chanmask = 0x01 | 0x02 | 0x10 | 0x20; break;
case DevFmtX51: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x200 | 0x400; break;
case DevFmtX51Rear: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x010 | 0x020; break;
case DevFmtX61: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x100 | 0x200 | 0x400; break;
case DevFmtX71: chanmask = 0x01 | 0x02 | 0x04 | 0x08 | 0x010 | 0x020 | 0x200 | 0x400; break;
case DevFmtAmbi3D:
/* .amb output requires FuMa */
mDevice->mAmbiOrder = minu(mDevice->mAmbiOrder, 3);
mDevice->mAmbiLayout = DevAmbiLayout::FuMa;
mDevice->mAmbiScale = DevAmbiScaling::FuMa;
isbformat = true;
chanmask = 0;
break;
}
bytes = mDevice->bytesFromFmt();
channels = mDevice->channelsFromFmt();
rewind(mFile);
fputs("RIFF", mFile);
fwrite32le(0xFFFFFFFF, mFile); // 'RIFF' header len; filled in at close
fputs("WAVE", mFile);
fputs("fmt ", mFile);
fwrite32le(40, mFile); // 'fmt ' header len; 40 bytes for EXTENSIBLE
// 16-bit val, format type id (extensible: 0xFFFE)
fwrite16le(0xFFFE, mFile);
// 16-bit val, channel count
fwrite16le(static_cast<ushort>(channels), mFile);
// 32-bit val, frequency
fwrite32le(mDevice->Frequency, mFile);
// 32-bit val, bytes per second
fwrite32le(mDevice->Frequency * channels * bytes, mFile);
// 16-bit val, frame size
fwrite16le(static_cast<ushort>(channels * bytes), mFile);
// 16-bit val, bits per sample
fwrite16le(static_cast<ushort>(bytes * 8), mFile);
// 16-bit val, extra byte count
fwrite16le(22, mFile);
// 16-bit val, valid bits per sample
fwrite16le(static_cast<ushort>(bytes * 8), mFile);
// 32-bit val, channel mask
fwrite32le(chanmask, mFile);
// 16 byte GUID, sub-type format
val = fwrite((mDevice->FmtType == DevFmtFloat) ?
(isbformat ? SUBTYPE_BFORMAT_FLOAT : SUBTYPE_FLOAT) :
(isbformat ? SUBTYPE_BFORMAT_PCM : SUBTYPE_PCM), 1, 16, mFile);
(void)val;
fputs("data", mFile);
fwrite32le(0xFFFFFFFF, mFile); // 'data' header len; filled in at close
if(ferror(mFile))
{
ERR("Error writing header: %s\n", strerror(errno));
return false;
}
/* Remember where sample data begins so stop() can back-fill chunk sizes. */
mDataStart = ftell(mFile);
setDefaultWFXChannelOrder();
/* Size the staging buffer for one update. */
const uint bufsize{mDevice->frameSizeFromFmt() * mDevice->UpdateSize};
mBuffer.resize(bufsize);
return true;
}
/* Seek back to the end of existing data (after a previous stop) and launch
 * the mixer thread. */
void WaveBackend::start()
{
if(mDataStart > 0 && fseek(mFile, 0, SEEK_END) != 0)
WARN("Failed to seek on output file\n");
try {
mKillNow.store(false, std::memory_order_release);
mThread = std::thread{std::mem_fn(&WaveBackend::mixerProc), this};
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start mixing thread: %s", e.what()};
}
}
/* Signal the mixer thread to exit, join it, then back-fill the placeholder
 * chunk lengths written by reset(). */
void WaveBackend::stop()
{
/* Already stopped (or never started) if the kill flag was set or no thread. */
if(mKillNow.exchange(true, std::memory_order_acq_rel) || !mThread.joinable())
return;
mThread.join();
if(mDataStart > 0)
{
long size{ftell(mFile)};
if(size > 0)
{
long dataLen{size - mDataStart};
if(fseek(mFile, 4, SEEK_SET) == 0)
fwrite32le(static_cast<uint>(size-8), mFile); // 'WAVE' header len
if(fseek(mFile, mDataStart-4, SEEK_SET) == 0)
fwrite32le(static_cast<uint>(dataLen), mFile); // 'data' header len
}
}
}
} // namespace
/* Nothing to initialize for the wave-file writer. */
bool WaveBackendFactory::init()
{
    return true;
}
/* Only playback is supported by this backend. */
bool WaveBackendFactory::querySupport(BackendType type)
{
    if(type == BackendType::Playback)
        return true;
    return false;
}
/* Return the name list for the requested device type. */
std::string WaveBackendFactory::probe(BackendType type)
{
    std::string names;
    if(type == BackendType::Playback)
    {
        /* The appended length deliberately includes the null terminator. */
        names.append(waveDevice, sizeof(waveDevice));
    }
    /* BackendType::Capture: nothing to report. */
    return names;
}
/* Create a playback backend instance; other types are unsupported. */
BackendPtr WaveBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    if(type != BackendType::Playback)
        return nullptr;
    return BackendPtr{new WaveBackend{device}};
}
/* Meyers-singleton accessor for the wave-writer backend factory. */
BackendFactory &WaveBackendFactory::getFactory()
{
    static WaveBackendFactory sFactory{};
    return sFactory;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_WAVE_H
#define BACKENDS_WAVE_H
#include "backends/base.h"
/* Factory that creates wave-file writer backends. */
struct WaveBackendFactory final : public BackendFactory {
public:
bool init() override;
bool querySupport(BackendType type) override;
std::string probe(BackendType type) override;
BackendPtr createBackend(ALCdevice *device, BackendType type) override;
/* Singleton accessor. */
static BackendFactory &getFactory();
};
#endif /* BACKENDS_WAVE_H */

View file

@ -1,792 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <windows.h>
#include <mmsystem.h>
#include "alMain.h"
#include "alu.h"
#include "ringbuffer.h"
#include "threads.h"
#include "backends/base.h"
#ifndef WAVE_FORMAT_IEEE_FLOAT
#define WAVE_FORMAT_IEEE_FLOAT 0x0003
#endif
#define DEVNAME_HEAD "OpenAL Soft on "
static vector_al_string PlaybackDevices;
static vector_al_string CaptureDevices;
/* Frees every string in a device list and resets the list to empty. */
static void clear_devlist(vector_al_string *list)
{
    VECTOR_FOR_EACH(al_string, *list, alstr_reset);
    VECTOR_RESIZE(*list, 0, 0);
}

/* Enumerates WinMM playback devices into PlaybackDevices. The list index of
 * each entry is the waveOut device ID; devices whose caps can't be queried
 * get an empty name so the indices stay aligned.
 */
static void ProbePlaybackDevices(void)
{
    ALuint numdevs;
    ALuint i;
    clear_devlist(&PlaybackDevices);
    numdevs = waveOutGetNumDevs();
    VECTOR_RESIZE(PlaybackDevices, 0, numdevs);
    for(i = 0;i < numdevs;i++)
    {
        WAVEOUTCAPSW WaveCaps;
        const al_string *iter;
        al_string dname;
        AL_STRING_INIT(dname);
        if(waveOutGetDevCapsW(i, &WaveCaps, sizeof(WaveCaps)) == MMSYSERR_NOERROR)
        {
            ALuint count = 0;
            /* Uniquify the name by appending " #2", " #3", ... until no
             * earlier entry matches.
             */
            while(1)
            {
                alstr_copy_cstr(&dname, DEVNAME_HEAD);
                alstr_append_wcstr(&dname, WaveCaps.szPname);
                if(count != 0)
                {
                    char str[64];
                    snprintf(str, sizeof(str), " #%d", count+1);
                    alstr_append_cstr(&dname, str);
                }
                count++;
#define MATCH_ENTRY(i) (alstr_cmp(dname, *(i)) == 0)
                VECTOR_FIND_IF(iter, const al_string, PlaybackDevices, MATCH_ENTRY);
                if(iter == VECTOR_END(PlaybackDevices)) break; /* name is unique */
#undef MATCH_ENTRY
            }
            TRACE("Got device \"%s\", ID %u\n", alstr_get_cstr(dname), i);
        }
        VECTOR_PUSH_BACK(PlaybackDevices, dname);
    }
}

/* Enumerates WinMM capture devices into CaptureDevices; same scheme as
 * ProbePlaybackDevices (index == waveIn device ID, names uniquified).
 */
static void ProbeCaptureDevices(void)
{
    ALuint numdevs;
    ALuint i;
    clear_devlist(&CaptureDevices);
    numdevs = waveInGetNumDevs();
    VECTOR_RESIZE(CaptureDevices, 0, numdevs);
    for(i = 0;i < numdevs;i++)
    {
        WAVEINCAPSW WaveCaps;
        const al_string *iter;
        al_string dname;
        AL_STRING_INIT(dname);
        if(waveInGetDevCapsW(i, &WaveCaps, sizeof(WaveCaps)) == MMSYSERR_NOERROR)
        {
            ALuint count = 0;
            while(1)
            {
                alstr_copy_cstr(&dname, DEVNAME_HEAD);
                alstr_append_wcstr(&dname, WaveCaps.szPname);
                if(count != 0)
                {
                    char str[64];
                    snprintf(str, sizeof(str), " #%d", count+1);
                    alstr_append_cstr(&dname, str);
                }
                count++;
#define MATCH_ENTRY(i) (alstr_cmp(dname, *(i)) == 0)
                VECTOR_FIND_IF(iter, const al_string, CaptureDevices, MATCH_ENTRY);
                if(iter == VECTOR_END(CaptureDevices)) break; /* name is unique */
#undef MATCH_ENTRY
            }
            TRACE("Got device \"%s\", ID %u\n", alstr_get_cstr(dname), i);
        }
        VECTOR_PUSH_BACK(CaptureDevices, dname);
    }
}
/* Playback backend state. Audio is mixed into four rotating wave buffers
 * queued on the output device; WaveBuffersCommitted counts how many are
 * currently queued with WinMM.
 */
typedef struct ALCwinmmPlayback {
    DERIVE_FROM_TYPE(ALCbackend);
    RefCount WaveBuffersCommitted;
    WAVEHDR WaveBuffer[4];
    HWAVEOUT OutHdl;
    WAVEFORMATEX Format;
    ATOMIC(ALenum) killNow;   /* set to stop the mixer thread */
    althrd_t thread;
} ALCwinmmPlayback;
static void ALCwinmmPlayback_Construct(ALCwinmmPlayback *self, ALCdevice *device);
static void ALCwinmmPlayback_Destruct(ALCwinmmPlayback *self);
static void CALLBACK ALCwinmmPlayback_waveOutProc(HWAVEOUT device, UINT msg, DWORD_PTR instance, DWORD_PTR param1, DWORD_PTR param2);
static int ALCwinmmPlayback_mixerProc(void *arg);
static ALCenum ALCwinmmPlayback_open(ALCwinmmPlayback *self, const ALCchar *name);
static ALCboolean ALCwinmmPlayback_reset(ALCwinmmPlayback *self);
static ALCboolean ALCwinmmPlayback_start(ALCwinmmPlayback *self);
static void ALCwinmmPlayback_stop(ALCwinmmPlayback *self);
static DECLARE_FORWARD2(ALCwinmmPlayback, ALCbackend, ALCenum, captureSamples, ALCvoid*, ALCuint)
static DECLARE_FORWARD(ALCwinmmPlayback, ALCbackend, ALCuint, availableSamples)
static DECLARE_FORWARD(ALCwinmmPlayback, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCwinmmPlayback, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCwinmmPlayback, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCwinmmPlayback)
DEFINE_ALCBACKEND_VTABLE(ALCwinmmPlayback);
/* Initializes the base backend, vtable, and playback state; the backend
 * starts in the "killed" state until _start is called.
 */
static void ALCwinmmPlayback_Construct(ALCwinmmPlayback *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCwinmmPlayback, ALCbackend, self);
    InitRef(&self->WaveBuffersCommitted, 0);
    self->OutHdl = NULL;
    ATOMIC_INIT(&self->killNow, AL_TRUE);
}
/* Closes the output device if open, then destroys the base backend. */
static void ALCwinmmPlayback_Destruct(ALCwinmmPlayback *self)
{
    if(self->OutHdl)
        waveOutClose(self->OutHdl);
    self->OutHdl = 0;
    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* ALCwinmmPlayback_waveOutProc
 *
 * Posts a message to 'ALCwinmmPlayback_mixerProc' everytime a WaveOut Buffer
 * is completed and returns to the application (for more data)
 */
static void CALLBACK ALCwinmmPlayback_waveOutProc(HWAVEOUT UNUSED(device), UINT msg, DWORD_PTR instance, DWORD_PTR param1, DWORD_PTR UNUSED(param2))
{
    ALCwinmmPlayback *self = (ALCwinmmPlayback*)instance;
    if(msg != WOM_DONE)
        return;
    DecrementRef(&self->WaveBuffersCommitted);
    PostThreadMessage(self->thread, msg, 0, param1);
}
/* Mixer thread: waits for WOM_DONE messages, refills each returned buffer
 * with freshly mixed samples, and requeues it. On shutdown it keeps draining
 * messages until every committed buffer has been returned by WinMM.
 */
FORCE_ALIGN static int ALCwinmmPlayback_mixerProc(void *arg)
{
    ALCwinmmPlayback *self = arg;
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    WAVEHDR *WaveHdr;
    MSG msg;
    SetRTPriority();
    althrd_setname(althrd_current(), MIXER_THREAD_NAME);
    while(GetMessage(&msg, NULL, 0, 0))
    {
        if(msg.message != WOM_DONE)
            continue;
        if(ATOMIC_LOAD(&self->killNow, almemory_order_acquire))
        {
            /* Quit only once all queued buffers have come back. */
            if(ReadRef(&self->WaveBuffersCommitted) == 0)
                break;
            continue;
        }
        WaveHdr = ((WAVEHDR*)msg.lParam);
        ALCwinmmPlayback_lock(self);
        aluMixData(device, WaveHdr->lpData, WaveHdr->dwBufferLength /
                           self->Format.nBlockAlign);
        ALCwinmmPlayback_unlock(self);
        // Send buffer back to play more data
        waveOutWrite(self->OutHdl, WaveHdr, sizeof(WAVEHDR));
        IncrementRef(&self->WaveBuffersCommitted);
    }
    return 0;
}
/* Opens the named playback device (any non-empty entry matches when
 * deviceName is NULL). If a float format can't be opened, retries once with
 * 16-bit integer samples.
 *
 * Returns ALC_NO_ERROR on success, ALC_INVALID_VALUE on failure.
 */
static ALCenum ALCwinmmPlayback_open(ALCwinmmPlayback *self, const ALCchar *deviceName)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    const al_string *iter;
    UINT DeviceID;
    MMRESULT res;
    if(VECTOR_SIZE(PlaybackDevices) == 0)
        ProbePlaybackDevices();
    // Find the Device ID matching the deviceName if valid
#define MATCH_DEVNAME(iter) (!alstr_empty(*(iter)) && \
                             (!deviceName || alstr_cmp_cstr(*(iter), deviceName) == 0))
    VECTOR_FIND_IF(iter, const al_string, PlaybackDevices, MATCH_DEVNAME);
    if(iter == VECTOR_END(PlaybackDevices))
        return ALC_INVALID_VALUE;
#undef MATCH_DEVNAME
    /* The list index is the waveOut device ID. */
    DeviceID = (UINT)(iter - VECTOR_BEGIN(PlaybackDevices));
retry_open:
    memset(&self->Format, 0, sizeof(WAVEFORMATEX));
    if(device->FmtType == DevFmtFloat)
    {
        self->Format.wFormatTag = WAVE_FORMAT_IEEE_FLOAT;
        self->Format.wBitsPerSample = 32;
    }
    else
    {
        self->Format.wFormatTag = WAVE_FORMAT_PCM;
        if(device->FmtType == DevFmtUByte || device->FmtType == DevFmtByte)
            self->Format.wBitsPerSample = 8;
        else
            self->Format.wBitsPerSample = 16;
    }
    self->Format.nChannels = ((device->FmtChans == DevFmtMono) ? 1 : 2);
    self->Format.nBlockAlign = self->Format.wBitsPerSample *
                               self->Format.nChannels / 8;
    self->Format.nSamplesPerSec = device->Frequency;
    self->Format.nAvgBytesPerSec = self->Format.nSamplesPerSec *
                                   self->Format.nBlockAlign;
    self->Format.cbSize = 0;
    if((res=waveOutOpen(&self->OutHdl, DeviceID, &self->Format, (DWORD_PTR)&ALCwinmmPlayback_waveOutProc, (DWORD_PTR)self, CALLBACK_FUNCTION)) != MMSYSERR_NOERROR)
    {
        /* Fall back from float to 16-bit PCM once. */
        if(device->FmtType == DevFmtFloat)
        {
            device->FmtType = DevFmtShort;
            goto retry_open;
        }
        ERR("waveOutOpen failed: %u\n", res);
        goto failure;
    }
    alstr_copy(&device->DeviceName, VECTOR_ELEM(PlaybackDevices, DeviceID));
    return ALC_NO_ERROR;
failure:
    if(self->OutHdl)
        waveOutClose(self->OutHdl);
    self->OutHdl = NULL;
    return ALC_INVALID_VALUE;
}
/* Reconciles the device's format fields with the format actually opened,
 * fixing the period layout to 4 updates.
 *
 * Returns ALC_TRUE on success, ALC_FALSE for unhandled formats.
 */
static ALCboolean ALCwinmmPlayback_reset(ALCwinmmPlayback *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    /* Rescale the update size to the opened sample rate, then split the
     * total buffer across 4 updates.
     */
    device->UpdateSize = (ALuint)((ALuint64)device->UpdateSize *
                                  self->Format.nSamplesPerSec /
                                  device->Frequency);
    device->UpdateSize = (device->UpdateSize*device->NumUpdates + 3) / 4;
    device->NumUpdates = 4;
    device->Frequency = self->Format.nSamplesPerSec;
    if(self->Format.wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
    {
        if(self->Format.wBitsPerSample == 32)
            device->FmtType = DevFmtFloat;
        else
        {
            ERR("Unhandled IEEE float sample depth: %d\n", self->Format.wBitsPerSample);
            return ALC_FALSE;
        }
    }
    else if(self->Format.wFormatTag == WAVE_FORMAT_PCM)
    {
        if(self->Format.wBitsPerSample == 16)
            device->FmtType = DevFmtShort;
        else if(self->Format.wBitsPerSample == 8)
            device->FmtType = DevFmtUByte;
        else
        {
            ERR("Unhandled PCM sample depth: %d\n", self->Format.wBitsPerSample);
            return ALC_FALSE;
        }
    }
    else
    {
        ERR("Unhandled format tag: 0x%04x\n", self->Format.wFormatTag);
        return ALC_FALSE;
    }
    if(self->Format.nChannels == 2)
        device->FmtChans = DevFmtStereo;
    else if(self->Format.nChannels == 1)
        device->FmtChans = DevFmtMono;
    else
    {
        ERR("Unhandled channel count: %d\n", self->Format.nChannels);
        return ALC_FALSE;
    }
    SetDefaultWFXChannelOrder(device);
    return ALC_TRUE;
}
/* Starts playback: allocates the four wave buffers, spawns the mixer thread,
 * then queues every buffer on the output device so mixing begins.
 *
 * Returns ALC_TRUE on success, ALC_FALSE if buffer allocation or thread
 * creation fails.
 */
static ALCboolean ALCwinmmPlayback_start(ALCwinmmPlayback *self)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    ALbyte *BufferData;
    ALint BufferSize;
    ALuint i;
    /* Allocate one backing store for all four buffers up front, so a failed
     * allocation can be reported before the mixer thread exists. (Previously
     * the calloc result was never checked, and it happened after the thread
     * was already running.)
     */
    BufferSize = device->UpdateSize*device->NumUpdates / 4;
    BufferSize *= FrameSizeFromDevFmt(device->FmtChans, device->FmtType, device->AmbiOrder);
    BufferData = calloc(4, BufferSize);
    if(!BufferData) return ALC_FALSE;
    ATOMIC_STORE(&self->killNow, AL_FALSE, almemory_order_release);
    if(althrd_create(&self->thread, ALCwinmmPlayback_mixerProc, self) != althrd_success)
    {
        free(BufferData);
        return ALC_FALSE;
    }
    InitRef(&self->WaveBuffersCommitted, 0);
    /* Split the storage into four equal chunks and queue each on the device.
     * Completed buffers post WOM_DONE back to the mixer thread, which refills
     * and requeues them.
     */
    for(i = 0;i < 4;i++)
    {
        memset(&self->WaveBuffer[i], 0, sizeof(WAVEHDR));
        self->WaveBuffer[i].dwBufferLength = BufferSize;
        self->WaveBuffer[i].lpData = ((i==0) ? (CHAR*)BufferData :
                                      (self->WaveBuffer[i-1].lpData +
                                       self->WaveBuffer[i-1].dwBufferLength));
        waveOutPrepareHeader(self->OutHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
        waveOutWrite(self->OutHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
        IncrementRef(&self->WaveBuffersCommitted);
    }
    return ALC_TRUE;
}
/* Stops playback: signals the mixer thread, joins it, then unprepares the
 * wave buffers and frees their shared backing store (owned by buffer 0).
 */
static void ALCwinmmPlayback_stop(ALCwinmmPlayback *self)
{
    void *buffer = NULL;
    int i;
    if(ATOMIC_EXCHANGE(&self->killNow, AL_TRUE, almemory_order_acq_rel))
        return; /* already stopped */
    althrd_join(self->thread, &i);
    // Release the wave buffers
    for(i = 0;i < 4;i++)
    {
        waveOutUnprepareHeader(self->OutHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
        if(i == 0) buffer = self->WaveBuffer[i].lpData;
        self->WaveBuffer[i].lpData = NULL;
    }
    free(buffer);
}
/* Capture backend state. Four rotating wave buffers receive recorded audio;
 * the capture thread copies completed buffers into the ring buffer that
 * captureSamples reads from.
 */
typedef struct ALCwinmmCapture {
    DERIVE_FROM_TYPE(ALCbackend);
    RefCount WaveBuffersCommitted;
    WAVEHDR WaveBuffer[4];
    HWAVEIN InHdl;
    ll_ringbuffer_t *Ring;
    WAVEFORMATEX Format;
    ATOMIC(ALenum) killNow;   /* set to stop the capture thread */
    althrd_t thread;
} ALCwinmmCapture;
static void ALCwinmmCapture_Construct(ALCwinmmCapture *self, ALCdevice *device);
static void ALCwinmmCapture_Destruct(ALCwinmmCapture *self);
static void CALLBACK ALCwinmmCapture_waveInProc(HWAVEIN device, UINT msg, DWORD_PTR instance, DWORD_PTR param1, DWORD_PTR param2);
static int ALCwinmmCapture_captureProc(void *arg);
static ALCenum ALCwinmmCapture_open(ALCwinmmCapture *self, const ALCchar *name);
static DECLARE_FORWARD(ALCwinmmCapture, ALCbackend, ALCboolean, reset)
static ALCboolean ALCwinmmCapture_start(ALCwinmmCapture *self);
static void ALCwinmmCapture_stop(ALCwinmmCapture *self);
static ALCenum ALCwinmmCapture_captureSamples(ALCwinmmCapture *self, ALCvoid *buffer, ALCuint samples);
static ALCuint ALCwinmmCapture_availableSamples(ALCwinmmCapture *self);
static DECLARE_FORWARD(ALCwinmmCapture, ALCbackend, ClockLatency, getClockLatency)
static DECLARE_FORWARD(ALCwinmmCapture, ALCbackend, void, lock)
static DECLARE_FORWARD(ALCwinmmCapture, ALCbackend, void, unlock)
DECLARE_DEFAULT_ALLOCATORS(ALCwinmmCapture)
DEFINE_ALCBACKEND_VTABLE(ALCwinmmCapture);
/* Initializes the base backend, vtable, and capture state; the backend
 * starts in the "killed" state until opened/started.
 */
static void ALCwinmmCapture_Construct(ALCwinmmCapture *self, ALCdevice *device)
{
    ALCbackend_Construct(STATIC_CAST(ALCbackend, self), device);
    SET_VTABLE2(ALCwinmmCapture, ALCbackend, self);
    InitRef(&self->WaveBuffersCommitted, 0);
    self->InHdl = NULL;
    ATOMIC_INIT(&self->killNow, AL_TRUE);
}
/* Shuts down the capture thread (if running), releases the wave buffers and
 * ring buffer, closes the device, and destroys the base backend.
 */
static void ALCwinmmCapture_Destruct(ALCwinmmCapture *self)
{
    void *buffer = NULL;
    int i;
    /* Tell the processing thread to quit and wait for it to do so. */
    if(!ATOMIC_EXCHANGE(&self->killNow, AL_TRUE, almemory_order_acq_rel))
    {
        PostThreadMessage(self->thread, WM_QUIT, 0, 0);
        althrd_join(self->thread, &i);
        /* Make sure capture is stopped and all pending buffers are flushed. */
        waveInReset(self->InHdl);
        // Release the wave buffers
        for(i = 0;i < 4;i++)
        {
            waveInUnprepareHeader(self->InHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
            if(i == 0) buffer = self->WaveBuffer[i].lpData;
            self->WaveBuffer[i].lpData = NULL;
        }
        free(buffer);
    }
    ll_ringbuffer_free(self->Ring);
    self->Ring = NULL;
    // Close the Wave device
    if(self->InHdl)
        waveInClose(self->InHdl);
    self->InHdl = 0;
    ALCbackend_Destruct(STATIC_CAST(ALCbackend, self));
}
/* ALCwinmmCapture_waveInProc
 *
 * Posts a message to 'ALCwinmmCapture_captureProc' everytime a WaveIn Buffer
 * is completed and returns to the application (with more data).
 */
static void CALLBACK ALCwinmmCapture_waveInProc(HWAVEIN UNUSED(device), UINT msg, DWORD_PTR instance, DWORD_PTR param1, DWORD_PTR UNUSED(param2))
{
    ALCwinmmCapture *self = (ALCwinmmCapture*)instance;
    if(msg != WIM_DATA)
        return;
    DecrementRef(&self->WaveBuffersCommitted);
    PostThreadMessage(self->thread, msg, 0, param1);
}
/* Capture thread: waits for WIM_DATA messages from the callback, copies each
 * completed buffer's samples into the ring buffer, and requeues the buffer
 * with the device.
 */
static int ALCwinmmCapture_captureProc(void *arg)
{
    ALCwinmmCapture *self = arg;
    WAVEHDR *WaveHdr;
    MSG msg;
    althrd_setname(althrd_current(), RECORD_THREAD_NAME);
    while(GetMessage(&msg, NULL, 0, 0))
    {
        if(msg.message != WIM_DATA)
            continue;
        /* Don't wait for other buffers to finish before quitting. We're
         * closing so we don't need them. */
        if(ATOMIC_LOAD(&self->killNow, almemory_order_acquire))
            break;
        WaveHdr = ((WAVEHDR*)msg.lParam);
        ll_ringbuffer_write(self->Ring, WaveHdr->lpData,
            WaveHdr->dwBytesRecorded / self->Format.nBlockAlign
        );
        // Send buffer back to capture more data
        waveInAddBuffer(self->InHdl, WaveHdr, sizeof(WAVEHDR));
        IncrementRef(&self->WaveBuffersCommitted);
    }
    return 0;
}
/* Opens the named capture device (any non-empty entry matches when name is
 * NULL), validates the requested channel/sample format, allocates the ring
 * buffer and four 50ms wave buffers, and starts the capture thread.
 *
 * Returns ALC_NO_ERROR on success, ALC_INVALID_ENUM for unsupported formats,
 * or ALC_INVALID_VALUE on any other failure.
 */
static ALCenum ALCwinmmCapture_open(ALCwinmmCapture *self, const ALCchar *name)
{
    ALCdevice *device = STATIC_CAST(ALCbackend, self)->mDevice;
    const al_string *iter;
    ALbyte *BufferData = NULL;
    DWORD CapturedDataSize;
    ALint BufferSize;
    UINT DeviceID;
    MMRESULT res;
    ALuint i;
    if(VECTOR_SIZE(CaptureDevices) == 0)
        ProbeCaptureDevices();
    // Find the Device ID matching the deviceName if valid
#define MATCH_DEVNAME(iter) (!alstr_empty(*(iter)) && (!name || alstr_cmp_cstr(*iter, name) == 0))
    VECTOR_FIND_IF(iter, const al_string, CaptureDevices, MATCH_DEVNAME);
    if(iter == VECTOR_END(CaptureDevices))
        return ALC_INVALID_VALUE;
#undef MATCH_DEVNAME
    /* The list index is the waveIn device ID. */
    DeviceID = (UINT)(iter - VECTOR_BEGIN(CaptureDevices));
    /* Only mono/stereo channel configurations are supported for capture. */
    switch(device->FmtChans)
    {
        case DevFmtMono:
        case DevFmtStereo:
            break;
        case DevFmtQuad:
        case DevFmtX51:
        case DevFmtX51Rear:
        case DevFmtX61:
        case DevFmtX71:
        case DevFmtAmbi3D:
            return ALC_INVALID_ENUM;
    }
    /* Signed 8-bit and unsigned 16/32-bit sample types are unsupported. */
    switch(device->FmtType)
    {
        case DevFmtUByte:
        case DevFmtShort:
        case DevFmtInt:
        case DevFmtFloat:
            break;
        case DevFmtByte:
        case DevFmtUShort:
        case DevFmtUInt:
            return ALC_INVALID_ENUM;
    }
    memset(&self->Format, 0, sizeof(WAVEFORMATEX));
    self->Format.wFormatTag = ((device->FmtType == DevFmtFloat) ?
                               WAVE_FORMAT_IEEE_FLOAT : WAVE_FORMAT_PCM);
    self->Format.nChannels = ChannelsFromDevFmt(device->FmtChans, device->AmbiOrder);
    self->Format.wBitsPerSample = BytesFromDevFmt(device->FmtType) * 8;
    self->Format.nBlockAlign = self->Format.wBitsPerSample *
                               self->Format.nChannels / 8;
    self->Format.nSamplesPerSec = device->Frequency;
    self->Format.nAvgBytesPerSec = self->Format.nSamplesPerSec *
                                   self->Format.nBlockAlign;
    self->Format.cbSize = 0;
    if((res=waveInOpen(&self->InHdl, DeviceID, &self->Format, (DWORD_PTR)&ALCwinmmCapture_waveInProc, (DWORD_PTR)self, CALLBACK_FUNCTION)) != MMSYSERR_NOERROR)
    {
        ERR("waveInOpen failed: %u\n", res);
        goto failure;
    }
    // Allocate circular memory buffer for the captured audio
    CapturedDataSize = device->UpdateSize*device->NumUpdates;
    // Make sure circular buffer is at least 100ms in size
    if(CapturedDataSize < (self->Format.nSamplesPerSec / 10))
        CapturedDataSize = self->Format.nSamplesPerSec / 10;
    self->Ring = ll_ringbuffer_create(CapturedDataSize, self->Format.nBlockAlign, false);
    if(!self->Ring) goto failure;
    InitRef(&self->WaveBuffersCommitted, 0);
    // Create 4 Buffers of 50ms each
    BufferSize = self->Format.nAvgBytesPerSec / 20;
    BufferSize -= (BufferSize % self->Format.nBlockAlign);
    BufferData = calloc(4, BufferSize);
    if(!BufferData) goto failure;
    for(i = 0;i < 4;i++)
    {
        memset(&self->WaveBuffer[i], 0, sizeof(WAVEHDR));
        self->WaveBuffer[i].dwBufferLength = BufferSize;
        self->WaveBuffer[i].lpData = ((i==0) ? (CHAR*)BufferData :
                                      (self->WaveBuffer[i-1].lpData +
                                       self->WaveBuffer[i-1].dwBufferLength));
        self->WaveBuffer[i].dwFlags = 0;
        self->WaveBuffer[i].dwLoops = 0;
        waveInPrepareHeader(self->InHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
        waveInAddBuffer(self->InHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
        IncrementRef(&self->WaveBuffersCommitted);
    }
    ATOMIC_STORE(&self->killNow, AL_FALSE, almemory_order_release);
    if(althrd_create(&self->thread, ALCwinmmCapture_captureProc, self) != althrd_success)
        goto failure;
    alstr_copy(&device->DeviceName, VECTOR_ELEM(CaptureDevices, DeviceID));
    return ALC_NO_ERROR;
failure:
    /* Unwind partial initialization in reverse order. */
    if(BufferData)
    {
        for(i = 0;i < 4;i++)
            waveInUnprepareHeader(self->InHdl, &self->WaveBuffer[i], sizeof(WAVEHDR));
        free(BufferData);
    }
    ll_ringbuffer_free(self->Ring);
    self->Ring = NULL;
    if(self->InHdl)
        waveInClose(self->InHdl);
    self->InHdl = NULL;
    return ALC_INVALID_VALUE;
}
/* Begins feeding queued wave buffers with recorded audio. */
static ALCboolean ALCwinmmCapture_start(ALCwinmmCapture *self)
{
    waveInStart(self->InHdl);
    return ALC_TRUE;
}
/* Pauses recording; queued buffers remain queued. */
static void ALCwinmmCapture_stop(ALCwinmmCapture *self)
{
    waveInStop(self->InHdl);
}
/* Copies 'samples' frames from the ring buffer into 'buffer'.
 * NOTE(review): no clamp against availableSamples here — presumably callers
 * never request more than is available; confirm ll_ringbuffer_read behavior
 * for short reads.
 */
static ALCenum ALCwinmmCapture_captureSamples(ALCwinmmCapture *self, ALCvoid *buffer, ALCuint samples)
{
    ll_ringbuffer_read(self->Ring, buffer, samples);
    return ALC_NO_ERROR;
}
/* Returns the number of frames currently readable from the ring buffer. */
static ALCuint ALCwinmmCapture_availableSamples(ALCwinmmCapture *self)
{
    return (ALCuint)ll_ringbuffer_read_space(self->Ring);
}
/* Appends a probed playback device name to the global list, skipping the
 * empty placeholders used for unqueryable devices.
 */
static inline void AppendAllDevicesList2(const al_string *name)
{
    if(!alstr_empty(*name))
        AppendAllDevicesList(alstr_get_cstr(*name));
}
/* Capture-list counterpart of AppendAllDevicesList2. */
static inline void AppendCaptureDeviceList2(const al_string *name)
{
    if(!alstr_empty(*name))
        AppendCaptureDeviceList(alstr_get_cstr(*name));
}
/* Factory object for the WinMM backend (stateless beyond the base type). */
typedef struct ALCwinmmBackendFactory {
    DERIVE_FROM_TYPE(ALCbackendFactory);
} ALCwinmmBackendFactory;
#define ALCWINMMBACKENDFACTORY_INITIALIZER { { GET_VTABLE2(ALCwinmmBackendFactory, ALCbackendFactory) } }
static ALCboolean ALCwinmmBackendFactory_init(ALCwinmmBackendFactory *self);
static void ALCwinmmBackendFactory_deinit(ALCwinmmBackendFactory *self);
static ALCboolean ALCwinmmBackendFactory_querySupport(ALCwinmmBackendFactory *self, ALCbackend_Type type);
static void ALCwinmmBackendFactory_probe(ALCwinmmBackendFactory *self, enum DevProbe type);
static ALCbackend* ALCwinmmBackendFactory_createBackend(ALCwinmmBackendFactory *self, ALCdevice *device, ALCbackend_Type type);
DEFINE_ALCBACKENDFACTORY_VTABLE(ALCwinmmBackendFactory);
/* Initializes the (empty) global device-name lists. Always succeeds. */
static ALCboolean ALCwinmmBackendFactory_init(ALCwinmmBackendFactory* UNUSED(self))
{
    VECTOR_INIT(PlaybackDevices);
    VECTOR_INIT(CaptureDevices);
    return ALC_TRUE;
}
/* Frees the global device-name lists. */
static void ALCwinmmBackendFactory_deinit(ALCwinmmBackendFactory* UNUSED(self))
{
    clear_devlist(&PlaybackDevices);
    VECTOR_DEINIT(PlaybackDevices);
    clear_devlist(&CaptureDevices);
    VECTOR_DEINIT(CaptureDevices);
}
/* Reports whether the WinMM backend supports the given backend type; both
 * playback and capture are available.
 */
static ALCboolean ALCwinmmBackendFactory_querySupport(ALCwinmmBackendFactory* UNUSED(self), ALCbackend_Type type)
{
    return (type == ALCbackend_Playback || type == ALCbackend_Capture) ?
           ALC_TRUE : ALC_FALSE;
}
/* Re-probes the requested device class and reports each non-empty name to
 * the global enumeration lists.
 */
static void ALCwinmmBackendFactory_probe(ALCwinmmBackendFactory* UNUSED(self), enum DevProbe type)
{
    switch(type)
    {
        case ALL_DEVICE_PROBE:
            ProbePlaybackDevices();
            VECTOR_FOR_EACH(const al_string, PlaybackDevices, AppendAllDevicesList2);
            break;
        case CAPTURE_DEVICE_PROBE:
            ProbeCaptureDevices();
            VECTOR_FOR_EACH(const al_string, CaptureDevices, AppendCaptureDeviceList2);
            break;
    }
}
/* Allocates a playback or capture backend for the device; returns NULL for
 * unsupported types or allocation failure.
 */
static ALCbackend* ALCwinmmBackendFactory_createBackend(ALCwinmmBackendFactory* UNUSED(self), ALCdevice *device, ALCbackend_Type type)
{
    if(type == ALCbackend_Playback)
    {
        ALCwinmmPlayback *backend;
        NEW_OBJ(backend, ALCwinmmPlayback)(device);
        if(!backend) return NULL;
        return STATIC_CAST(ALCbackend, backend);
    }
    if(type == ALCbackend_Capture)
    {
        ALCwinmmCapture *backend;
        NEW_OBJ(backend, ALCwinmmCapture)(device);
        if(!backend) return NULL;
        return STATIC_CAST(ALCbackend, backend);
    }
    return NULL;
}
/* Returns the process-wide singleton WinMM factory. */
ALCbackendFactory *ALCwinmmBackendFactory_getFactory(void)
{
    static ALCwinmmBackendFactory factory = ALCWINMMBACKENDFACTORY_INITIALIZER;
    return STATIC_CAST(ALCbackendFactory, &factory);
}

View file

@ -0,0 +1,631 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 1999-2007 by authors.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include "backends/winmm.h"
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <windows.h>
#include <mmsystem.h>
#include <mmreg.h>
#include <array>
#include <atomic>
#include <thread>
#include <vector>
#include <string>
#include <algorithm>
#include <functional>
#include "alcmain.h"
#include "alu.h"
#include "compat.h"
#include "core/logging.h"
#include "ringbuffer.h"
#include "strutils.h"
#include "threads.h"
#ifndef WAVE_FORMAT_IEEE_FLOAT
#define WAVE_FORMAT_IEEE_FLOAT 0x0003
#endif
namespace {
#define DEVNAME_HEAD "OpenAL Soft on "
al::vector<std::string> PlaybackDevices;
al::vector<std::string> CaptureDevices;
/* Returns true if 'name' already appears in the device-name list. */
bool checkName(const al::vector<std::string> &list, const std::string &name)
{
    auto matches = [&name](const std::string &entry) { return entry == name; };
    return std::any_of(list.cbegin(), list.cend(), matches);
}
void ProbePlaybackDevices(void)
{
PlaybackDevices.clear();
UINT numdevs{waveOutGetNumDevs()};
PlaybackDevices.reserve(numdevs);
for(UINT i{0};i < numdevs;++i)
{
std::string dname;
WAVEOUTCAPSW WaveCaps{};
if(waveOutGetDevCapsW(i, &WaveCaps, sizeof(WaveCaps)) == MMSYSERR_NOERROR)
{
const std::string basename{DEVNAME_HEAD + wstr_to_utf8(WaveCaps.szPname)};
int count{1};
std::string newname{basename};
while(checkName(PlaybackDevices, newname))
{
newname = basename;
newname += " #";
newname += std::to_string(++count);
}
dname = std::move(newname);
TRACE("Got device \"%s\", ID %u\n", dname.c_str(), i);
}
PlaybackDevices.emplace_back(std::move(dname));
}
}
void ProbeCaptureDevices(void)
{
CaptureDevices.clear();
UINT numdevs{waveInGetNumDevs()};
CaptureDevices.reserve(numdevs);
for(UINT i{0};i < numdevs;++i)
{
std::string dname;
WAVEINCAPSW WaveCaps{};
if(waveInGetDevCapsW(i, &WaveCaps, sizeof(WaveCaps)) == MMSYSERR_NOERROR)
{
const std::string basename{DEVNAME_HEAD + wstr_to_utf8(WaveCaps.szPname)};
int count{1};
std::string newname{basename};
while(checkName(CaptureDevices, newname))
{
newname = basename;
newname += " #";
newname += std::to_string(++count);
}
dname = std::move(newname);
TRACE("Got device \"%s\", ID %u\n", dname.c_str(), i);
}
CaptureDevices.emplace_back(std::move(dname));
}
}
/* Playback backend. Audio is mixed into four rotating wave buffers queued on
 * the WinMM output device; mWritable counts buffers currently available for
 * refilling, and mSem wakes the mixer thread when one returns.
 */
struct WinMMPlayback final : public BackendBase {
    WinMMPlayback(ALCdevice *device) noexcept : BackendBase{device} { }
    ~WinMMPlayback() override;
    void CALLBACK waveOutProc(HWAVEOUT device, UINT msg, DWORD_PTR param1, DWORD_PTR param2) noexcept;
    /* Static trampoline: WinMM can't call a member function directly. */
    static void CALLBACK waveOutProcC(HWAVEOUT device, UINT msg, DWORD_PTR instance, DWORD_PTR param1, DWORD_PTR param2) noexcept
    { reinterpret_cast<WinMMPlayback*>(instance)->waveOutProc(device, msg, param1, param2); }
    int mixerProc();
    void open(const char *name) override;
    bool reset() override;
    void start() override;
    void stop() override;
    std::atomic<uint> mWritable{0u};  /* buffers returned by WinMM, ready to refill */
    al::semaphore mSem;
    uint mIdx{0u};                    /* next buffer index to refill */
    std::array<WAVEHDR,4> mWaveBuffer{};
    HWAVEOUT mOutHdl{nullptr};
    WAVEFORMATEX mFormat{};
    std::atomic<bool> mKillNow{true};
    std::thread mThread;
    DEF_NEWDEL(WinMMPlayback)
};
/* Closes the output device and frees the shared buffer storage (owned by
 * mWaveBuffer[0].lpData).
 */
WinMMPlayback::~WinMMPlayback()
{
    if(mOutHdl)
        waveOutClose(mOutHdl);
    mOutHdl = nullptr;
    al_free(mWaveBuffer[0].lpData);
    std::fill(mWaveBuffer.begin(), mWaveBuffer.end(), WAVEHDR{});
}
/* WinMMPlayback::waveOutProc
 *
 * Posts a message to 'WinMMPlayback::mixerProc' everytime a WaveOut Buffer is
 * completed and returns to the application (for more data)
 */
void CALLBACK WinMMPlayback::waveOutProc(HWAVEOUT, UINT msg, DWORD_PTR, DWORD_PTR) noexcept
{
    if(msg != WOM_DONE) return;
    mWritable.fetch_add(1, std::memory_order_acq_rel);
    mSem.post();
}
/* Mixer thread: waits on the semaphore for returned buffers, refills each in
 * rotation with mixed samples, and requeues it on the device.
 */
FORCE_ALIGN int WinMMPlayback::mixerProc()
{
    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);
    const size_t frame_step{mDevice->channelsFromFmt()};
    while(!mKillNow.load(std::memory_order_acquire)
        && mDevice->Connected.load(std::memory_order_acquire))
    {
        uint todo{mWritable.load(std::memory_order_acquire)};
        if(todo < 1)
        {
            mSem.wait();
            continue;
        }
        size_t widx{mIdx};
        do {
            WAVEHDR &waveHdr = mWaveBuffer[widx];
            widx = (widx+1) % mWaveBuffer.size();
            mDevice->renderSamples(waveHdr.lpData, mDevice->UpdateSize, frame_step);
            mWritable.fetch_sub(1, std::memory_order_acq_rel);
            waveOutWrite(mOutHdl, &waveHdr, sizeof(WAVEHDR));
        } while(--todo);
        mIdx = static_cast<uint>(widx);
    }
    return 0;
}
/* Opens the named playback device (the first device when name is null). If a
 * float format can't be opened, retries once with 16-bit integer samples.
 *
 * Throws al::backend_exception on unknown device name or waveOutOpen failure.
 */
void WinMMPlayback::open(const char *name)
{
    if(PlaybackDevices.empty())
        ProbePlaybackDevices();
    // Find the Device ID matching the deviceName if valid
    auto iter = name ?
        std::find(PlaybackDevices.cbegin(), PlaybackDevices.cend(), name) :
        PlaybackDevices.cbegin();
    if(iter == PlaybackDevices.cend())
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name};
    /* The list position is the waveOut device ID. */
    auto DeviceID = static_cast<UINT>(std::distance(PlaybackDevices.cbegin(), iter));
retry_open:
    mFormat = WAVEFORMATEX{};
    if(mDevice->FmtType == DevFmtFloat)
    {
        mFormat.wFormatTag = WAVE_FORMAT_IEEE_FLOAT;
        mFormat.wBitsPerSample = 32;
    }
    else
    {
        mFormat.wFormatTag = WAVE_FORMAT_PCM;
        if(mDevice->FmtType == DevFmtUByte || mDevice->FmtType == DevFmtByte)
            mFormat.wBitsPerSample = 8;
        else
            mFormat.wBitsPerSample = 16;
    }
    mFormat.nChannels = ((mDevice->FmtChans == DevFmtMono) ? 1 : 2);
    mFormat.nBlockAlign = static_cast<WORD>(mFormat.wBitsPerSample * mFormat.nChannels / 8);
    mFormat.nSamplesPerSec = mDevice->Frequency;
    mFormat.nAvgBytesPerSec = mFormat.nSamplesPerSec * mFormat.nBlockAlign;
    mFormat.cbSize = 0;
    MMRESULT res{waveOutOpen(&mOutHdl, DeviceID, &mFormat,
        reinterpret_cast<DWORD_PTR>(&WinMMPlayback::waveOutProcC),
        reinterpret_cast<DWORD_PTR>(this), CALLBACK_FUNCTION)};
    if(res != MMSYSERR_NOERROR)
    {
        /* Fall back from float to 16-bit PCM once. */
        if(mDevice->FmtType == DevFmtFloat)
        {
            mDevice->FmtType = DevFmtShort;
            goto retry_open;
        }
        throw al::backend_exception{al::backend_error::DeviceError, "waveOutOpen failed: %u", res};
    }
    mDevice->DeviceName = PlaybackDevices[DeviceID];
}
/* Reconciles the device's format fields with the format actually opened,
 * lays out the buffer as four update periods, and allocates one contiguous
 * block of storage split across the four wave headers.
 *
 * Returns false for format combinations WinMM gave us that we can't handle.
 */
bool WinMMPlayback::reset()
{
    /* Rescale the buffer to the opened sample rate, round up to a multiple
     * of 4, and use quarter-buffer updates.
     */
    mDevice->BufferSize = static_cast<uint>(uint64_t{mDevice->BufferSize} *
        mFormat.nSamplesPerSec / mDevice->Frequency);
    mDevice->BufferSize = (mDevice->BufferSize+3) & ~0x3u;
    mDevice->UpdateSize = mDevice->BufferSize / 4;
    mDevice->Frequency = mFormat.nSamplesPerSec;
    if(mFormat.wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
    {
        if(mFormat.wBitsPerSample == 32)
            mDevice->FmtType = DevFmtFloat;
        else
        {
            ERR("Unhandled IEEE float sample depth: %d\n", mFormat.wBitsPerSample);
            return false;
        }
    }
    else if(mFormat.wFormatTag == WAVE_FORMAT_PCM)
    {
        if(mFormat.wBitsPerSample == 16)
            mDevice->FmtType = DevFmtShort;
        else if(mFormat.wBitsPerSample == 8)
            mDevice->FmtType = DevFmtUByte;
        else
        {
            ERR("Unhandled PCM sample depth: %d\n", mFormat.wBitsPerSample);
            return false;
        }
    }
    else
    {
        ERR("Unhandled format tag: 0x%04x\n", mFormat.wFormatTag);
        return false;
    }
    uint chanmask{};
    if(mFormat.nChannels == 2)
    {
        mDevice->FmtChans = DevFmtStereo;
        chanmask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT;
    }
    else if(mFormat.nChannels == 1)
    {
        mDevice->FmtChans = DevFmtMono;
        chanmask = SPEAKER_FRONT_CENTER;
    }
    else
    {
        ERR("Unhandled channel count: %d\n", mFormat.nChannels);
        return false;
    }
    setChannelOrderFromWFXMask(chanmask);
    /* One allocation backs all four buffers; buffer 0 owns the pointer. */
    uint BufferSize{mDevice->UpdateSize * mDevice->frameSizeFromFmt()};
    al_free(mWaveBuffer[0].lpData);
    mWaveBuffer[0] = WAVEHDR{};
    mWaveBuffer[0].lpData = static_cast<char*>(al_calloc(16, BufferSize * mWaveBuffer.size()));
    mWaveBuffer[0].dwBufferLength = BufferSize;
    for(size_t i{1};i < mWaveBuffer.size();i++)
    {
        mWaveBuffer[i] = WAVEHDR{};
        mWaveBuffer[i].lpData = mWaveBuffer[i-1].lpData + mWaveBuffer[i-1].dwBufferLength;
        mWaveBuffer[i].dwBufferLength = BufferSize;
    }
    mIdx = 0;
    return true;
}
/* Prepares the wave headers, marks every buffer writable, and launches the
 * mixer thread. Throws al::backend_exception if the thread can't start.
 */
void WinMMPlayback::start()
{
    try {
        std::for_each(mWaveBuffer.begin(), mWaveBuffer.end(),
            [this](WAVEHDR &waveHdr) -> void
            { waveOutPrepareHeader(mOutHdl, &waveHdr, sizeof(WAVEHDR)); });
        mWritable.store(static_cast<uint>(mWaveBuffer.size()), std::memory_order_release);
        mKillNow.store(false, std::memory_order_release);
        mThread = std::thread{std::mem_fn(&WinMMPlayback::mixerProc), this};
    }
    catch(std::exception& e) {
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start mixing thread: %s", e.what()};
    }
}
/* Stops the mixer thread, waits for all queued buffers to come back from
 * WinMM, then unprepares the headers.
 */
void WinMMPlayback::stop()
{
    if(mKillNow.exchange(true, std::memory_order_acq_rel) || !mThread.joinable())
        return;
    mThread.join();
    /* Wait until every buffer has been returned by the device. */
    while(mWritable.load(std::memory_order_acquire) < mWaveBuffer.size())
        mSem.wait();
    std::for_each(mWaveBuffer.begin(), mWaveBuffer.end(),
        [this](WAVEHDR &waveHdr) -> void
        { waveOutUnprepareHeader(mOutHdl, &waveHdr, sizeof(WAVEHDR)); });
    mWritable.store(0, std::memory_order_release);
}
/* Capture backend. Four rotating wave buffers receive recorded audio; the
 * capture thread copies completed buffers into mRing, which captureSamples
 * reads from. mReadable counts buffers WinMM has returned with data.
 */
struct WinMMCapture final : public BackendBase {
    WinMMCapture(ALCdevice *device) noexcept : BackendBase{device} { }
    ~WinMMCapture() override;
    void CALLBACK waveInProc(HWAVEIN device, UINT msg, DWORD_PTR param1, DWORD_PTR param2) noexcept;
    /* Static trampoline: WinMM can't call a member function directly. */
    static void CALLBACK waveInProcC(HWAVEIN device, UINT msg, DWORD_PTR instance, DWORD_PTR param1, DWORD_PTR param2) noexcept
    { reinterpret_cast<WinMMCapture*>(instance)->waveInProc(device, msg, param1, param2); }
    int captureProc();
    void open(const char *name) override;
    void start() override;
    void stop() override;
    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;
    std::atomic<uint> mReadable{0u};  /* buffers returned by WinMM with data */
    al::semaphore mSem;
    uint mIdx{0};                     /* next buffer index to drain */
    std::array<WAVEHDR,4> mWaveBuffer{};
    HWAVEIN mInHdl{nullptr};
    RingBufferPtr mRing{nullptr};
    WAVEFORMATEX mFormat{};
    std::atomic<bool> mKillNow{true};
    std::thread mThread;
    DEF_NEWDEL(WinMMCapture)
};
/* Closes the input device and frees the shared buffer storage (owned by
 * mWaveBuffer[0].lpData).
 */
WinMMCapture::~WinMMCapture()
{
    // Close the Wave device
    if(mInHdl)
        waveInClose(mInHdl);
    mInHdl = nullptr;
    al_free(mWaveBuffer[0].lpData);
    std::fill(mWaveBuffer.begin(), mWaveBuffer.end(), WAVEHDR{});
}
/* WinMMCapture::waveInProc
 *
 * Posts a message to 'WinMMCapture::captureProc' everytime a WaveIn Buffer is
 * completed and returns to the application (with more data).
 */
void CALLBACK WinMMCapture::waveInProc(HWAVEIN, UINT msg, DWORD_PTR, DWORD_PTR) noexcept
{
    if(msg != WIM_DATA) return;
    mReadable.fetch_add(1, std::memory_order_acq_rel);
    mSem.post();
}
/* Capture thread: waits on the semaphore for filled buffers, writes each in
 * rotation into the ring buffer, and requeues it with the device.
 */
int WinMMCapture::captureProc()
{
    althrd_setname(RECORD_THREAD_NAME);
    while(!mKillNow.load(std::memory_order_acquire) &&
          mDevice->Connected.load(std::memory_order_acquire))
    {
        uint todo{mReadable.load(std::memory_order_acquire)};
        if(todo < 1)
        {
            mSem.wait();
            continue;
        }
        size_t widx{mIdx};
        do {
            WAVEHDR &waveHdr = mWaveBuffer[widx];
            widx = (widx+1) % mWaveBuffer.size();
            mRing->write(waveHdr.lpData, waveHdr.dwBytesRecorded / mFormat.nBlockAlign);
            mReadable.fetch_sub(1, std::memory_order_acq_rel);
            waveInAddBuffer(mInHdl, &waveHdr, sizeof(WAVEHDR));
        } while(--todo);
        mIdx = static_cast<uint>(widx);
    }
    return 0;
}
/* Opens the named capture device (or the first available one when name is
 * null), validates the requested channel/sample configuration, and allocates
 * the ring buffer plus the WinMM wave headers used to deliver captured audio.
 *
 * Throws al::backend_exception for unknown device names, unsupported
 * formats, or waveInOpen failure.
 */
void WinMMCapture::open(const char *name)
{
    if(CaptureDevices.empty())
        ProbeCaptureDevices();

    /* Find the Device ID matching the deviceName if valid */
    auto iter = name ?
        std::find(CaptureDevices.cbegin(), CaptureDevices.cend(), name) :
        CaptureDevices.cbegin();
    if(iter == CaptureDevices.cend())
    {
        /* BUGFIX: name may be null here (default-device open with an empty
         * device list); passing nullptr to a %s format is undefined
         * behavior, so substitute an empty string. */
        throw al::backend_exception{al::backend_error::NoDevice, "Device name \"%s\" not found",
            name ? name : ""};
    }
    auto DeviceID = static_cast<UINT>(std::distance(CaptureDevices.cbegin(), iter));

    /* Only mono and stereo capture is supported. */
    switch(mDevice->FmtChans)
    {
    case DevFmtMono:
    case DevFmtStereo:
        break;

    case DevFmtQuad:
    case DevFmtX51:
    case DevFmtX51Rear:
    case DevFmtX61:
    case DevFmtX71:
    case DevFmtAmbi3D:
        throw al::backend_exception{al::backend_error::DeviceError, "%s capture not supported",
            DevFmtChannelsString(mDevice->FmtChans)};
    }

    switch(mDevice->FmtType)
    {
    case DevFmtUByte:
    case DevFmtShort:
    case DevFmtInt:
    case DevFmtFloat:
        break;

    case DevFmtByte:
    case DevFmtUShort:
    case DevFmtUInt:
        throw al::backend_exception{al::backend_error::DeviceError, "%s samples not supported",
            DevFmtTypeString(mDevice->FmtType)};
    }

    mFormat = WAVEFORMATEX{};
    mFormat.wFormatTag = (mDevice->FmtType == DevFmtFloat) ?
        WAVE_FORMAT_IEEE_FLOAT : WAVE_FORMAT_PCM;
    mFormat.nChannels = static_cast<WORD>(mDevice->channelsFromFmt());
    mFormat.wBitsPerSample = static_cast<WORD>(mDevice->bytesFromFmt() * 8);
    mFormat.nBlockAlign = static_cast<WORD>(mFormat.wBitsPerSample * mFormat.nChannels / 8);
    mFormat.nSamplesPerSec = mDevice->Frequency;
    mFormat.nAvgBytesPerSec = mFormat.nSamplesPerSec * mFormat.nBlockAlign;
    mFormat.cbSize = 0;

    MMRESULT res{waveInOpen(&mInHdl, DeviceID, &mFormat,
        reinterpret_cast<DWORD_PTR>(&WinMMCapture::waveInProcC),
        reinterpret_cast<DWORD_PTR>(this), CALLBACK_FUNCTION)};
    if(res != MMSYSERR_NOERROR)
        throw al::backend_exception{al::backend_error::DeviceError, "waveInOpen failed: %u", res};

    /* Ensure each buffer is 50ms each */
    DWORD BufferSize{mFormat.nAvgBytesPerSec / 20u};
    BufferSize -= (BufferSize % mFormat.nBlockAlign);

    /* Allocate circular memory buffer for the captured audio */
    /* Make sure circular buffer is at least 100ms in size */
    uint CapturedDataSize{mDevice->BufferSize};
    CapturedDataSize = static_cast<uint>(maxz(CapturedDataSize, BufferSize*mWaveBuffer.size()));

    mRing = RingBuffer::Create(CapturedDataSize, mFormat.nBlockAlign, false);

    /* One allocation backs all wave headers; each header points at its own
     * slice of the buffer. */
    al_free(mWaveBuffer[0].lpData);
    mWaveBuffer[0] = WAVEHDR{};
    mWaveBuffer[0].lpData = static_cast<char*>(al_calloc(16, BufferSize * mWaveBuffer.size()));
    mWaveBuffer[0].dwBufferLength = BufferSize;
    for(size_t i{1};i < mWaveBuffer.size();++i)
    {
        mWaveBuffer[i] = WAVEHDR{};
        mWaveBuffer[i].lpData = mWaveBuffer[i-1].lpData + mWaveBuffer[i-1].dwBufferLength;
        mWaveBuffer[i].dwBufferLength = mWaveBuffer[i-1].dwBufferLength;
    }

    mDevice->DeviceName = CaptureDevices[DeviceID];
}
void WinMMCapture::start()
{
try {
for(size_t i{0};i < mWaveBuffer.size();++i)
{
waveInPrepareHeader(mInHdl, &mWaveBuffer[i], sizeof(WAVEHDR));
waveInAddBuffer(mInHdl, &mWaveBuffer[i], sizeof(WAVEHDR));
}
mKillNow.store(false, std::memory_order_release);
mThread = std::thread{std::mem_fn(&WinMMCapture::captureProc), this};
waveInStart(mInHdl);
}
catch(std::exception& e) {
throw al::backend_exception{al::backend_error::DeviceError,
"Failed to start recording thread: %s", e.what()};
}
}
void WinMMCapture::stop()
{
waveInStop(mInHdl);
mKillNow.store(true, std::memory_order_release);
if(mThread.joinable())
{
mSem.post();
mThread.join();
}
waveInReset(mInHdl);
for(size_t i{0};i < mWaveBuffer.size();++i)
waveInUnprepareHeader(mInHdl, &mWaveBuffer[i], sizeof(WAVEHDR));
mReadable.store(0, std::memory_order_release);
mIdx = 0;
}
/* Copies up to 'samples' captured frames out of the ring buffer. */
void WinMMCapture::captureSamples(al::byte *buffer, uint samples)
{
    mRing->read(buffer, samples);
}
/* Number of captured frames currently available to read. */
uint WinMMCapture::availableSamples()
{
    return static_cast<uint>(mRing->readSpace());
}
} // namespace
/* WinMM requires no global setup, so initialization always succeeds. */
bool WinMMBackendFactory::init()
{
    return true;
}
/* Both playback and capture are provided by this backend. */
bool WinMMBackendFactory::querySupport(BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
    case BackendType::Capture:
        return true;
    default:
        return false;
    }
}
/* Returns the available device names for the given backend type as a
 * null-separated, double-null-terminated list. */
std::string WinMMBackendFactory::probe(BackendType type)
{
    std::string outnames;

    /* Appends the name plus its null terminator, skipping empty entries,
     * so the result stays a valid null-separated list. */
    auto add_device = [&outnames](const std::string &dname) -> void
    {
        if(!dname.empty())
            outnames.append(dname.c_str(), dname.length()+1);
    };

    if(type == BackendType::Playback)
    {
        ProbePlaybackDevices();
        for(const std::string &dname : PlaybackDevices)
            add_device(dname);
    }
    else if(type == BackendType::Capture)
    {
        ProbeCaptureDevices();
        for(const std::string &dname : CaptureDevices)
            add_device(dname);
    }
    return outnames;
}
/* Creates a playback or capture backend for the device; null for any other
 * backend type. */
BackendPtr WinMMBackendFactory::createBackend(ALCdevice *device, BackendType type)
{
    switch(type)
    {
    case BackendType::Playback:
        return BackendPtr{new WinMMPlayback{device}};
    case BackendType::Capture:
        return BackendPtr{new WinMMCapture{device}};
    default:
        break;
    }
    return nullptr;
}
/* Singleton accessor; the factory is constructed on first use. */
BackendFactory &WinMMBackendFactory::getFactory()
{
    static WinMMBackendFactory factory{};
    return factory;
}

View file

@ -0,0 +1,19 @@
#ifndef BACKENDS_WINMM_H
#define BACKENDS_WINMM_H
#include "backends/base.h"
/* Factory for the Windows Multimedia (WinMM) playback/capture backends. */
struct WinMMBackendFactory final : public BackendFactory {
public:
    /* One-time backend initialization; always succeeds for WinMM. */
    bool init() override;

    /* Whether this factory can create backends of the given type. */
    bool querySupport(BackendType type) override;

    /* Returns a null-separated, double-null-terminated device name list. */
    std::string probe(BackendType type) override;

    /* Creates a playback or capture backend for the given device. */
    BackendPtr createBackend(ALCdevice *device, BackendType type) override;

    /* Singleton accessor. */
    static BackendFactory &getFactory();
};
#endif /* BACKENDS_WINMM_H */

View file

@ -1,492 +0,0 @@
#include "config.h"
#include "bformatdec.h"
#include "ambdec.h"
#include "filters/splitter.h"
#include "alu.h"
#include "bool.h"
#include "threads.h"
#include "almalloc.h"
/* NOTE: These are scale factors as applied to Ambisonics content. Decoder
 * coefficients should be divided by these values to get proper N3D scalings.
 */

/* N3D input is already in the target scaling, so every factor is unity. */
const ALfloat N3D2N3DScale[MAX_AMBI_COEFFS] = {
    1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f,
    1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f
};

/* SN3D-to-N3D conversion factors, indexed by ACN channel number. */
const ALfloat SN3D2N3DScale[MAX_AMBI_COEFFS] = {
    1.000000000f, /* ACN  0 (W), sqrt(1) */
    1.732050808f, /* ACN  1 (Y), sqrt(3) */
    1.732050808f, /* ACN  2 (Z), sqrt(3) */
    1.732050808f, /* ACN  3 (X), sqrt(3) */
    2.236067978f, /* ACN  4 (V), sqrt(5) */
    2.236067978f, /* ACN  5 (T), sqrt(5) */
    2.236067978f, /* ACN  6 (R), sqrt(5) */
    2.236067978f, /* ACN  7 (S), sqrt(5) */
    2.236067978f, /* ACN  8 (U), sqrt(5) */
    2.645751311f, /* ACN  9 (Q), sqrt(7) */
    2.645751311f, /* ACN 10 (O), sqrt(7) */
    2.645751311f, /* ACN 11 (M), sqrt(7) */
    2.645751311f, /* ACN 12 (K), sqrt(7) */
    2.645751311f, /* ACN 13 (L), sqrt(7) */
    2.645751311f, /* ACN 14 (N), sqrt(7) */
    2.645751311f, /* ACN 15 (P), sqrt(7) */
};

/* FuMa-to-N3D conversion factors, indexed by ACN channel number. */
const ALfloat FuMa2N3DScale[MAX_AMBI_COEFFS] = {
    1.414213562f, /* ACN  0 (W), sqrt(2) */
    1.732050808f, /* ACN  1 (Y), sqrt(3) */
    1.732050808f, /* ACN  2 (Z), sqrt(3) */
    1.732050808f, /* ACN  3 (X), sqrt(3) */
    1.936491673f, /* ACN  4 (V), sqrt(15)/2 */
    1.936491673f, /* ACN  5 (T), sqrt(15)/2 */
    2.236067978f, /* ACN  6 (R), sqrt(5) */
    1.936491673f, /* ACN  7 (S), sqrt(15)/2 */
    1.936491673f, /* ACN  8 (U), sqrt(15)/2 */
    2.091650066f, /* ACN  9 (Q), sqrt(35/8) */
    1.972026594f, /* ACN 10 (O), sqrt(35)/3 */
    2.231093404f, /* ACN 11 (M), sqrt(224/45) */
    2.645751311f, /* ACN 12 (K), sqrt(7) */
    2.231093404f, /* ACN 13 (L), sqrt(224/45) */
    1.972026594f, /* ACN 14 (N), sqrt(35)/3 */
    2.091650066f, /* ACN 15 (P), sqrt(35/8) */
};
#define HF_BAND 0
#define LF_BAND 1
#define NUM_BANDS 2
/* These points are in AL coordinates! */
/* Eight virtual speaker positions at the corners of a cube. */
static const ALfloat Ambi3DPoints[8][3] = {
    { -0.577350269f,  0.577350269f, -0.577350269f },
    {  0.577350269f,  0.577350269f, -0.577350269f },
    { -0.577350269f,  0.577350269f,  0.577350269f },
    {  0.577350269f,  0.577350269f,  0.577350269f },
    { -0.577350269f, -0.577350269f, -0.577350269f },
    {  0.577350269f, -0.577350269f, -0.577350269f },
    { -0.577350269f, -0.577350269f,  0.577350269f },
    {  0.577350269f, -0.577350269f,  0.577350269f },
};

/* First-order decoder coefficients for the cube layout above; only the
 * first four ambisonic coefficients are used per virtual speaker. */
static const ALfloat Ambi3DDecoder[8][MAX_AMBI_COEFFS] = {
    { 0.125f,  0.125f,  0.125f,  0.125f },
    { 0.125f, -0.125f,  0.125f,  0.125f },
    { 0.125f,  0.125f,  0.125f, -0.125f },
    { 0.125f, -0.125f,  0.125f, -0.125f },
    { 0.125f,  0.125f, -0.125f,  0.125f },
    { 0.125f, -0.125f, -0.125f,  0.125f },
    { 0.125f,  0.125f, -0.125f, -0.125f },
    { 0.125f, -0.125f, -0.125f, -0.125f },
};

/* High-frequency scales applied per first-order coefficient when decoding
 * through the cube layout (first entry is for the W coefficient). */
static const ALfloat Ambi3DDecoderHFScale[MAX_AMBI_COEFFS] = {
    2.0f,
    1.15470054f, 1.15470054f, 1.15470054f
};
/* NOTE: BandSplitter filters are unused with single-band decoding */
typedef struct BFormatDec {
    ALuint Enabled; /* Bitfield of enabled channels. */

    union {
        /* Dual-band decode: one HF and one LF coefficient set per output. */
        alignas(16) ALfloat Dual[MAX_OUTPUT_CHANNELS][NUM_BANDS][MAX_AMBI_COEFFS];
        /* Single-band decode: one coefficient set per output. */
        alignas(16) ALfloat Single[MAX_OUTPUT_CHANNELS][MAX_AMBI_COEFFS];
    } Matrix;

    /* Per-input-channel crossover filters (dual-band decoding only). */
    BandSplitter XOver[MAX_AMBI_COEFFS];

    /* Dynamically allocated sample storage (NumChannels*2 lines). */
    ALfloat (*Samples)[BUFFERSIZE];
    /* These two alias into Samples */
    ALfloat (*SamplesHF)[BUFFERSIZE];
    ALfloat (*SamplesLF)[BUFFERSIZE];

    /* Scratch mix buffer for one output channel at a time. */
    alignas(16) ALfloat ChannelMix[BUFFERSIZE];

    /* Crossover and per-band gains for the four first-order up-sampler
     * input channels. */
    struct {
        BandSplitter XOver;
        ALfloat Gains[NUM_BANDS];
    } UpSampler[4];

    ALsizei NumChannels;  /* Number of ambisonic input channels. */
    ALboolean DualBand;   /* Whether dual-band decoding is active. */
} BFormatDec;
/* Allocates a zero-initialized, 16-byte-aligned decoder instance. */
BFormatDec *bformatdec_alloc()
{
    BFormatDec *dec = al_calloc(16, sizeof(BFormatDec));
    return dec;
}
/* Frees the decoder and its sample storage, then clears the caller's
 * pointer. Safe to call with a null or already-cleared pointer. */
void bformatdec_free(BFormatDec **dec)
{
    if(!dec || !*dec)
        return;

    al_free((*dec)->Samples);
    (*dec)->Samples = NULL;
    (*dec)->SamplesHF = NULL;
    (*dec)->SamplesLF = NULL;

    al_free(*dec);
    *dec = NULL;
}
/* (Re)configures the decoder from an ambdec configuration.
 *
 * dec      - decoder to configure
 * conf     - configuration (speaker matrices, band setup, coefficient scale)
 * chancount - number of ambisonic input channels to decode
 * srate    - device sample rate, used to set up crossover filters
 * chanmap  - maps each configured speaker to a real output channel index
 */
void bformatdec_reset(BFormatDec *dec, const AmbDecConf *conf, ALsizei chancount, ALuint srate, const ALsizei chanmap[MAX_OUTPUT_CHANNELS])
{
    /* Maps 2D (non-periphonic) coefficient slots to full 3D ACN indices. */
    static const ALsizei map2DTo3D[MAX_AMBI2D_COEFFS] = {
        0, 1, 3, 4, 8, 9, 15
    };
    const ALfloat *coeff_scale = N3D2N3DScale;
    bool periphonic;
    ALfloat ratio;
    ALsizei i;

    /* Reallocate the band-split sample storage for the new channel count.
     * SamplesHF/SamplesLF alias the first/second half of Samples. */
    al_free(dec->Samples);
    dec->Samples = NULL;
    dec->SamplesHF = NULL;
    dec->SamplesLF = NULL;

    dec->NumChannels = chancount;
    dec->Samples = al_calloc(16, dec->NumChannels*2 * sizeof(dec->Samples[0]));
    dec->SamplesHF = dec->Samples;
    dec->SamplesLF = dec->SamplesHF + dec->NumChannels;

    /* Record which output channels the configuration actually drives. */
    dec->Enabled = 0;
    for(i = 0;i < conf->NumSpeakers;i++)
        dec->Enabled |= 1 << chanmap[i];

    /* Select the scale table matching the config's coefficient scaling;
     * coefficients are divided by these to get N3D. */
    if(conf->CoeffScale == ADS_SN3D)
        coeff_scale = SN3D2N3DScale;
    else if(conf->CoeffScale == ADS_FuMa)
        coeff_scale = FuMa2N3DScale;

    /* Initialize the first-order up-sampler crossovers (fixed 400Hz). */
    memset(dec->UpSampler, 0, sizeof(dec->UpSampler));
    ratio = 400.0f / (ALfloat)srate;
    for(i = 0;i < 4;i++)
        bandsplit_init(&dec->UpSampler[i].XOver, ratio);
    if((conf->ChanMask&AMBI_PERIPHONIC_MASK))
    {
        periphonic = true;

        /* HF scales depend on the target order: masks above 0x1ff indicate
         * third-order content, above 0xf second-order. */
        dec->UpSampler[0].Gains[HF_BAND] = (conf->ChanMask > 0x1ff) ? W_SCALE_3H3P :
                                           (conf->ChanMask > 0xf) ? W_SCALE_2H2P : 1.0f;
        dec->UpSampler[0].Gains[LF_BAND] = 1.0f;
        for(i = 1;i < 4;i++)
        {
            dec->UpSampler[i].Gains[HF_BAND] = (conf->ChanMask > 0x1ff) ? XYZ_SCALE_3H3P :
                                               (conf->ChanMask > 0xf) ? XYZ_SCALE_2H2P : 1.0f;
            dec->UpSampler[i].Gains[LF_BAND] = 1.0f;
        }
    }
    else
    {
        periphonic = false;

        /* Non-periphonic output: the fourth first-order input channel is
         * silenced (gains zeroed below). */
        dec->UpSampler[0].Gains[HF_BAND] = (conf->ChanMask > 0x1ff) ? W_SCALE_3H0P :
                                           (conf->ChanMask > 0xf) ? W_SCALE_2H0P : 1.0f;
        dec->UpSampler[0].Gains[LF_BAND] = 1.0f;
        for(i = 1;i < 3;i++)
        {
            dec->UpSampler[i].Gains[HF_BAND] = (conf->ChanMask > 0x1ff) ? XYZ_SCALE_3H0P :
                                               (conf->ChanMask > 0xf) ? XYZ_SCALE_2H0P : 1.0f;
            dec->UpSampler[i].Gains[LF_BAND] = 1.0f;
        }
        dec->UpSampler[3].Gains[HF_BAND] = 0.0f;
        dec->UpSampler[3].Gains[LF_BAND] = 0.0f;
    }

    memset(&dec->Matrix, 0, sizeof(dec->Matrix));
    if(conf->FreqBands == 1)
    {
        /* Single-band: one coefficient matrix per output channel. 'gain'
         * is updated at each order boundary (j values differ for 2D vs 3D
         * channel ordering) and 'k' only advances on channels present in
         * the config's channel mask. */
        dec->DualBand = AL_FALSE;
        for(i = 0;i < conf->NumSpeakers;i++)
        {
            ALsizei chan = chanmap[i];
            ALfloat gain;
            ALsizei j, k;

            if(!periphonic)
            {
                for(j = 0,k = 0;j < MAX_AMBI2D_COEFFS;j++)
                {
                    ALsizei l = map2DTo3D[j];
                    if(j == 0) gain = conf->HFOrderGain[0];
                    else if(j == 1) gain = conf->HFOrderGain[1];
                    else if(j == 3) gain = conf->HFOrderGain[2];
                    else if(j == 5) gain = conf->HFOrderGain[3];
                    if((conf->ChanMask&(1<<l)))
                        dec->Matrix.Single[chan][j] = conf->HFMatrix[i][k++] / coeff_scale[l] *
                                                      gain;
                }
            }
            else
            {
                for(j = 0,k = 0;j < MAX_AMBI_COEFFS;j++)
                {
                    if(j == 0) gain = conf->HFOrderGain[0];
                    else if(j == 1) gain = conf->HFOrderGain[1];
                    else if(j == 4) gain = conf->HFOrderGain[2];
                    else if(j == 9) gain = conf->HFOrderGain[3];
                    if((conf->ChanMask&(1<<j)))
                        dec->Matrix.Single[chan][j] = conf->HFMatrix[i][k++] / coeff_scale[j] *
                                                      gain;
                }
            }
        }
    }
    else
    {
        /* Dual-band: separate HF/LF matrices, with the crossover ratio
         * boosting HF gains and attenuating LF gains symmetrically. */
        dec->DualBand = AL_TRUE;

        ratio = conf->XOverFreq / (ALfloat)srate;
        for(i = 0;i < MAX_AMBI_COEFFS;i++)
            bandsplit_init(&dec->XOver[i], ratio);

        /* XOverRatio is in decibels; convert to an amplitude ratio. */
        ratio = powf(10.0f, conf->XOverRatio / 40.0f);
        for(i = 0;i < conf->NumSpeakers;i++)
        {
            ALsizei chan = chanmap[i];
            ALfloat gain;
            ALsizei j, k;

            if(!periphonic)
            {
                for(j = 0,k = 0;j < MAX_AMBI2D_COEFFS;j++)
                {
                    ALsizei l = map2DTo3D[j];
                    if(j == 0) gain = conf->HFOrderGain[0] * ratio;
                    else if(j == 1) gain = conf->HFOrderGain[1] * ratio;
                    else if(j == 3) gain = conf->HFOrderGain[2] * ratio;
                    else if(j == 5) gain = conf->HFOrderGain[3] * ratio;
                    if((conf->ChanMask&(1<<l)))
                        dec->Matrix.Dual[chan][HF_BAND][j] = conf->HFMatrix[i][k++] /
                                                             coeff_scale[l] * gain;
                }
                for(j = 0,k = 0;j < MAX_AMBI2D_COEFFS;j++)
                {
                    ALsizei l = map2DTo3D[j];
                    if(j == 0) gain = conf->LFOrderGain[0] / ratio;
                    else if(j == 1) gain = conf->LFOrderGain[1] / ratio;
                    else if(j == 3) gain = conf->LFOrderGain[2] / ratio;
                    else if(j == 5) gain = conf->LFOrderGain[3] / ratio;
                    if((conf->ChanMask&(1<<l)))
                        dec->Matrix.Dual[chan][LF_BAND][j] = conf->LFMatrix[i][k++] /
                                                             coeff_scale[l] * gain;
                }
            }
            else
            {
                for(j = 0,k = 0;j < MAX_AMBI_COEFFS;j++)
                {
                    if(j == 0) gain = conf->HFOrderGain[0] * ratio;
                    else if(j == 1) gain = conf->HFOrderGain[1] * ratio;
                    else if(j == 4) gain = conf->HFOrderGain[2] * ratio;
                    else if(j == 9) gain = conf->HFOrderGain[3] * ratio;
                    if((conf->ChanMask&(1<<j)))
                        dec->Matrix.Dual[chan][HF_BAND][j] = conf->HFMatrix[i][k++] /
                                                             coeff_scale[j] * gain;
                }
                for(j = 0,k = 0;j < MAX_AMBI_COEFFS;j++)
                {
                    if(j == 0) gain = conf->LFOrderGain[0] / ratio;
                    else if(j == 1) gain = conf->LFOrderGain[1] / ratio;
                    else if(j == 4) gain = conf->LFOrderGain[2] / ratio;
                    else if(j == 9) gain = conf->LFOrderGain[3] / ratio;
                    if((conf->ChanMask&(1<<j)))
                        dec->Matrix.Dual[chan][LF_BAND][j] = conf->LFMatrix[i][k++] /
                                                             coeff_scale[j] * gain;
                }
            }
        }
    }
}
/* Decodes the ambisonic input and adds the result into OutBuffer. For
 * dual-band decoders the input is split into HF/LF bands first and each
 * band is mixed with its own coefficient matrix; otherwise a single matrix
 * is applied directly. Disabled output channels are skipped. */
void bformatdec_process(struct BFormatDec *dec, ALfloat (*restrict OutBuffer)[BUFFERSIZE], ALsizei OutChannels, const ALfloat (*restrict InSamples)[BUFFERSIZE], ALsizei SamplesToDo)
{
    ALsizei chan, i;

    OutBuffer = ASSUME_ALIGNED(OutBuffer, 16);
    if(dec->DualBand)
    {
        /* Band-split every input channel into the HF/LF scratch lines. */
        for(i = 0;i < dec->NumChannels;i++)
            bandsplit_process(&dec->XOver[i], dec->SamplesHF[i], dec->SamplesLF[i],
                              InSamples[i], SamplesToDo);

        for(chan = 0;chan < OutChannels;chan++)
        {
            if(!(dec->Enabled&(1<<chan)))
                continue;

            /* Mix both bands into the scratch buffer, then accumulate the
             * result onto the output channel. */
            memset(dec->ChannelMix, 0, SamplesToDo*sizeof(ALfloat));
            MixRowSamples(dec->ChannelMix, dec->Matrix.Dual[chan][HF_BAND],
                          dec->SamplesHF, dec->NumChannels, 0, SamplesToDo
            );
            MixRowSamples(dec->ChannelMix, dec->Matrix.Dual[chan][LF_BAND],
                          dec->SamplesLF, dec->NumChannels, 0, SamplesToDo
            );
            for(i = 0;i < SamplesToDo;i++)
                OutBuffer[chan][i] += dec->ChannelMix[i];
        }
    }
    else
    {
        for(chan = 0;chan < OutChannels;chan++)
        {
            if(!(dec->Enabled&(1<<chan)))
                continue;

            memset(dec->ChannelMix, 0, SamplesToDo*sizeof(ALfloat));
            MixRowSamples(dec->ChannelMix, dec->Matrix.Single[chan], InSamples,
                          dec->NumChannels, 0, SamplesToDo);

            for(i = 0;i < SamplesToDo;i++)
                OutBuffer[chan][i] += dec->ChannelMix[i];
        }
    }
}
/* Up-samples first-order input into the decoder's higher-order stream by
 * band-splitting each input channel and mixing the bands with per-band
 * counter-scales. InChannels is expected to cover the first-order channels
 * whose UpSampler entries were configured in bformatdec_reset. */
void bformatdec_upSample(struct BFormatDec *dec, ALfloat (*restrict OutBuffer)[BUFFERSIZE], const ALfloat (*restrict InSamples)[BUFFERSIZE], ALsizei InChannels, ALsizei SamplesToDo)
{
    ALsizei i;

    /* This up-sampler leverages the differences observed in dual-band second-
     * and third-order decoder matrices compared to first-order. For the same
     * output channel configuration, the low-frequency matrix has identical
     * coefficients in the shared input channels, while the high-frequency
     * matrix has extra scalars applied to the W channel and X/Y/Z channels.
     * Mixing the first-order content into the higher-order stream with the
     * appropriate counter-scales applied to the HF response results in the
     * subsequent higher-order decode generating the same response as a first-
     * order decode.
     */
    for(i = 0;i < InChannels;i++)
    {
        /* First, split the first-order components into low and high frequency
         * bands.
         */
        bandsplit_process(&dec->UpSampler[i].XOver,
            dec->Samples[HF_BAND], dec->Samples[LF_BAND],
            InSamples[i], SamplesToDo
        );

        /* Now write each band to the output. */
        MixRowSamples(OutBuffer[i], dec->UpSampler[i].Gains,
            dec->Samples, NUM_BANDS, 0, SamplesToDo
        );
    }
}
#define INVALID_UPSAMPLE_INDEX INT_MAX

/* Finds the channel slot mapped to the given ACN, or
 * INVALID_UPSAMPLE_INDEX if no slot uses that ACN. */
static ALsizei GetACNIndex(const BFChannelConfig *chans, ALsizei numchans, ALsizei acn)
{
    ALsizei idx = 0;
    while(idx < numchans)
    {
        if(chans[idx].Index == acn)
            return idx;
        ++idx;
    }
    return INVALID_UPSAMPLE_INDEX;
}
#define GetChannelForACN(b, a) GetACNIndex((b).Ambi.Map, (b).NumChannels, (a))
/* Stand-alone first-order to higher-order up-sampler state. */
typedef struct AmbiUpsampler {
    /* Band-split scratch space for one input channel at a time. */
    alignas(16) ALfloat Samples[NUM_BANDS][BUFFERSIZE];

    /* Crossover filters for the four first-order input channels. */
    BandSplitter XOver[4];

    /* Per-input, per-output gains for each band. */
    ALfloat Gains[4][MAX_OUTPUT_CHANNELS][NUM_BANDS];
} AmbiUpsampler;
/* Allocates a zero-initialized, 16-byte-aligned up-sampler instance. */
AmbiUpsampler *ambiup_alloc()
{
    AmbiUpsampler *ambiup = al_calloc(16, sizeof(AmbiUpsampler));
    return ambiup;
}
/* Frees the up-sampler and clears the caller's pointer. */
void ambiup_free(struct AmbiUpsampler **ambiup)
{
    if(!ambiup)
        return;
    al_free(*ambiup);
    *ambiup = NULL;
}
/* Configures the up-sampler's crossovers and gains for the device's dry
 * output. w_scale/xyz_scale are HF counter-scales for the W and X/Y/Z
 * channels, used when the dry mix maps channels directly by ACN. */
void ambiup_reset(struct AmbiUpsampler *ambiup, const ALCdevice *device, ALfloat w_scale, ALfloat xyz_scale)
{
    ALfloat ratio;
    ALsizei i;

    /* Fixed 400Hz crossover for all four first-order channels. */
    ratio = 400.0f / (ALfloat)device->Frequency;
    for(i = 0;i < 4;i++)
        bandsplit_init(&ambiup->XOver[i], ratio);

    memset(ambiup->Gains, 0, sizeof(ambiup->Gains));
    if(device->Dry.CoeffCount > 0)
    {
        ALfloat encgains[8][MAX_OUTPUT_CHANNELS];
        ALsizei j;
        size_t k;

        /* Encode each virtual cube speaker direction into dry-mix gains. */
        for(k = 0;k < COUNTOF(Ambi3DPoints);k++)
        {
            ALfloat coeffs[MAX_AMBI_COEFFS] = { 0.0f };
            CalcDirectionCoeffs(Ambi3DPoints[k], 0.0f, coeffs);
            ComputeDryPanGains(&device->Dry, coeffs, 1.0f, encgains[k]);
        }

        /* Combine the matrices that do the in->virt and virt->out conversions
         * so we get a single in->out conversion. NOTE: the Encoder matrix
         * (encgains) and output are transposed, so the input channels line up
         * with the rows and the output channels line up with the columns.
         */
        for(i = 0;i < 4;i++)
        {
            for(j = 0;j < device->Dry.NumChannels;j++)
            {
                ALfloat gain=0.0f;
                for(k = 0;k < COUNTOF(Ambi3DDecoder);k++)
                    gain += Ambi3DDecoder[k][i] * encgains[k][j];
                ambiup->Gains[i][j][HF_BAND] = gain * Ambi3DDecoderHFScale[i];
                ambiup->Gains[i][j][LF_BAND] = gain;
            }
        }
    }
    else
    {
        /* Direct ACN mapping: route each first-order channel straight to the
         * matching dry channel, applying the channel's own scale plus the HF
         * counter-scale. */
        for(i = 0;i < 4;i++)
        {
            ALsizei index = GetChannelForACN(device->Dry, i);
            if(index != INVALID_UPSAMPLE_INDEX)
            {
                ALfloat scale = device->Dry.Ambi.Map[index].Scale;
                ambiup->Gains[i][index][HF_BAND] = scale * ((i==0) ? w_scale : xyz_scale);
                ambiup->Gains[i][index][LF_BAND] = scale;
            }
        }
    }
}
/* Up-samples the four first-order input channels into OutBuffer: each input
 * is band-split, then both bands are mixed to every output channel with the
 * gains prepared in ambiup_reset. */
void ambiup_process(struct AmbiUpsampler *ambiup, ALfloat (*restrict OutBuffer)[BUFFERSIZE], ALsizei OutChannels, const ALfloat (*restrict InSamples)[BUFFERSIZE], ALsizei SamplesToDo)
{
    ALsizei i, j;

    for(i = 0;i < 4;i++)
    {
        bandsplit_process(&ambiup->XOver[i],
            ambiup->Samples[HF_BAND], ambiup->Samples[LF_BAND],
            InSamples[i], SamplesToDo
        );

        for(j = 0;j < OutChannels;j++)
            MixRowSamples(OutBuffer[j], ambiup->Gains[i][j],
                ambiup->Samples, NUM_BANDS, 0, SamplesToDo
            );
    }
}

View file

@ -0,0 +1,298 @@
#include "config.h"
#include "bformatdec.h"
#include <algorithm>
#include <array>
#include <cassert>
#include <cmath>
#include <iterator>
#include <numeric>
#include "almalloc.h"
#include "alu.h"
#include "core/ambdec.h"
#include "core/filters/splitter.h"
#include "front_stablizer.h"
#include "math_defs.h"
#include "opthelpers.h"
namespace {

/* Per-order HF scales for first-, second-, and third-order decoders. */
constexpr std::array<float,MaxAmbiOrder+1> Ambi3DDecoderHFScale{{
    1.00000000e+00f, 1.00000000e+00f
}};
constexpr std::array<float,MaxAmbiOrder+1> Ambi3DDecoderHFScale2O{{
    7.45355990e-01f, 1.00000000e+00f, 1.00000000e+00f
}};
constexpr std::array<float,MaxAmbiOrder+1> Ambi3DDecoderHFScale3O{{
    5.89792205e-01f, 8.79693856e-01f, 1.00000000e+00f, 1.00000000e+00f
}};

/* Selects the HF scale table for the given ambisonic order. */
inline auto& GetDecoderHFScales(uint order) noexcept
{
    switch(order)
    {
    case 0:
    case 1:
        return Ambi3DDecoderHFScale;
    case 2:
        return Ambi3DDecoderHFScale2O;
    default:
        return Ambi3DDecoderHFScale3O;
    }
}

/* Selects the coefficient scale table matching the config's scaling. */
inline auto& GetAmbiScales(AmbDecScale scaletype) noexcept
{
    switch(scaletype)
    {
    case AmbDecScale::FuMa:
        return AmbiScale::FromFuMa();
    case AmbDecScale::SN3D:
        return AmbiScale::FromSN3D();
    default:
        return AmbiScale::FromN3D();
    }
}

} // namespace
/* Builds a decoder from an ambdec configuration.
 *
 * conf        - decoder configuration (speaker matrices, band setup, scale)
 * allow_2band - permit dual-band decoding when the config requests it
 * inchans     - number of ambisonic input channels
 * srate       - device sample rate, for the crossover filters
 * chanmap     - maps each configured speaker to a real output channel
 * stablizer   - optional front stablizer, taken over by the decoder
 */
BFormatDec::BFormatDec(const AmbDecConf *conf, const bool allow_2band, const size_t inchans,
    const uint srate, const uint (&chanmap)[MAX_OUTPUT_CHANNELS],
    std::unique_ptr<FrontStablizer> stablizer)
    : mStablizer{std::move(stablizer)}, mDualBand{allow_2band && (conf->FreqBands == 2)}
    , mChannelDec{inchans}
{
    const bool periphonic{(conf->ChanMask&AmbiPeriphonicMask) != 0};
    /* Coefficients are divided by these to get the native scaling. */
    auto&& coeff_scale = GetAmbiScales(conf->CoeffScale);

    if(!mDualBand)
    {
        /* Single-band: per input channel, apply the per-order gain and
         * scale, skipping channels absent from the config's mask ('k'
         * indexes the config matrix and only advances on present ones). */
        for(size_t j{0},k{0};j < mChannelDec.size();++j)
        {
            const size_t acn{periphonic ? j : AmbiIndex::FromACN2D()[j]};
            if(!(conf->ChanMask&(1u<<acn))) continue;
            const size_t order{AmbiIndex::OrderFromChannel()[acn]};
            const float gain{conf->HFOrderGain[order] / coeff_scale[acn]};
            for(size_t i{0u};i < conf->NumSpeakers;++i)
            {
                const size_t chanidx{chanmap[i]};
                mChannelDec[j].mGains.Single[chanidx] = conf->Matrix[i][k] * gain;
            }
            ++k;
        }
    }
    else
    {
        /* Dual-band: one crossover per input channel (all identical), with
         * the crossover ratio boosting HF and attenuating LF gains. */
        mChannelDec[0].mXOver.init(conf->XOverFreq / static_cast<float>(srate));
        for(size_t j{1};j < mChannelDec.size();++j)
            mChannelDec[j].mXOver = mChannelDec[0].mXOver;

        /* XOverRatio is in decibels; convert to an amplitude ratio. */
        const float ratio{std::pow(10.0f, conf->XOverRatio / 40.0f)};
        for(size_t j{0},k{0};j < mChannelDec.size();++j)
        {
            const size_t acn{periphonic ? j : AmbiIndex::FromACN2D()[j]};
            if(!(conf->ChanMask&(1u<<acn))) continue;
            const size_t order{AmbiIndex::OrderFromChannel()[acn]};
            const float hfGain{conf->HFOrderGain[order] * ratio / coeff_scale[acn]};
            const float lfGain{conf->LFOrderGain[order] / ratio / coeff_scale[acn]};
            for(size_t i{0u};i < conf->NumSpeakers;++i)
            {
                const size_t chanidx{chanmap[i]};
                mChannelDec[j].mGains.Dual[sHFBand][chanidx] = conf->HFMatrix[i][k] * hfGain;
                mChannelDec[j].mGains.Dual[sLFBand][chanidx] = conf->LFMatrix[i][k] * lfGain;
            }
            ++k;
        }
    }
}
/* Constructs a decoder from precomputed coefficient sets. 'coeffs' holds
 * the single-band (or high-frequency) gains and 'coeffslf' the
 * low-frequency gains; an empty 'coeffslf' selects single-band operation.
 * Each ChannelDec entry in the spans is indexed by input channel.
 */
BFormatDec::BFormatDec(const size_t inchans, const al::span<const ChannelDec> coeffs,
    const al::span<const ChannelDec> coeffslf, std::unique_ptr<FrontStablizer> stablizer)
    : mStablizer{std::move(stablizer)}, mDualBand{!coeffslf.empty()}, mChannelDec{inchans}
{
    if(mDualBand)
    {
        /* Transpose the coefficient spans into per-input-channel gain rows
         * for both bands. */
        for(size_t j{0};j < mChannelDec.size();++j)
        {
            float *hfgains{mChannelDec[j].mGains.Dual[sHFBand]};
            for(size_t i{0};i < coeffs.size();++i)
                hfgains[i] = coeffs[i][j];

            float *lfgains{mChannelDec[j].mGains.Dual[sLFBand]};
            for(size_t i{0};i < coeffslf.size();++i)
                lfgains[i] = coeffslf[i][j];
        }
    }
    else
    {
        for(size_t j{0};j < mChannelDec.size();++j)
        {
            float *gains{mChannelDec[j].mGains.Single};
            for(size_t i{0};i < coeffs.size();++i)
                gains[i] = coeffs[i][j];
        }
    }
}
/* Decodes the ambisonic input channels and mixes the result into OutBuffer.
 * InSamples must provide one buffer line per configured channel decoder.
 * NOTE(review): the same gain array is passed as both current and target to
 * MixSamples — presumably this applies constant gains with no fading;
 * confirm against the MixSamples implementation. */
void BFormatDec::process(const al::span<FloatBufferLine> OutBuffer,
    const FloatBufferLine *InSamples, const size_t SamplesToDo)
{
    ASSUME(SamplesToDo > 0);

    if(mDualBand)
    {
        const al::span<float> hfSamples{mSamples[sHFBand].data(), SamplesToDo};
        const al::span<float> lfSamples{mSamples[sLFBand].data(), SamplesToDo};
        for(auto &chandec : mChannelDec)
        {
            /* Split this input channel into bands, then mix each band with
             * its own per-output gain set. */
            chandec.mXOver.process({InSamples->data(), SamplesToDo}, hfSamples.data(),
                lfSamples.data());
            MixSamples(hfSamples, OutBuffer, chandec.mGains.Dual[sHFBand],
                chandec.mGains.Dual[sHFBand], 0, 0);
            MixSamples(lfSamples, OutBuffer, chandec.mGains.Dual[sLFBand],
                chandec.mGains.Dual[sLFBand], 0, 0);
            ++InSamples;
        }
    }
    else
    {
        for(auto &chandec : mChannelDec)
        {
            MixSamples({InSamples->data(), SamplesToDo}, OutBuffer, chandec.mGains.Single,
                chandec.mGains.Single, 0, 0);
            ++InSamples;
        }
    }
}
/* Decodes the ambisonic input with front-image stablization. The front-left
 * (lidx), front-right (ridx) and center (cidx) output channels are processed
 * through a delayed mid/side path that pans part of the mid signal toward
 * the center channel; all other channels are simply delayed to stay aligned.
 */
void BFormatDec::processStablize(const al::span<FloatBufferLine> OutBuffer,
    const FloatBufferLine *InSamples, const size_t lidx, const size_t ridx, const size_t cidx,
    const size_t SamplesToDo)
{
    ASSUME(SamplesToDo > 0);

    /* Move the existing direct L/R signal out so it doesn't get processed by
     * the stablizer. Add a delay to it so it stays aligned with the stablizer
     * delay.
     */
    float *RESTRICT mid{al::assume_aligned<16>(mStablizer->MidDirect.data())};
    float *RESTRICT side{al::assume_aligned<16>(mStablizer->Side.data())};
    for(size_t i{0};i < SamplesToDo;++i)
    {
        mid[FrontStablizer::DelayLength+i] = OutBuffer[lidx][i] + OutBuffer[ridx][i];
        side[FrontStablizer::DelayLength+i] = OutBuffer[lidx][i] - OutBuffer[ridx][i];
    }
    std::fill_n(OutBuffer[lidx].begin(), SamplesToDo, 0.0f);
    std::fill_n(OutBuffer[ridx].begin(), SamplesToDo, 0.0f);

    /* Decode the B-Format input to OutBuffer. */
    process(OutBuffer, InSamples, SamplesToDo);

    /* Apply a delay to all channels, except the front-left and front-right, so
     * they maintain correct timing.
     */
    const size_t NumChannels{OutBuffer.size()};
    for(size_t i{0u};i < NumChannels;i++)
    {
        if(i == lidx || i == ridx)
            continue;

        auto &DelayBuf = mStablizer->DelayBuf[i];
        auto buffer_end = OutBuffer[i].begin() + SamplesToDo;
        if LIKELY(SamplesToDo >= FrontStablizer::DelayLength)
        {
            /* Rotate the newest DelayLength samples to the front, then swap
             * them with the stored delay buffer. */
            auto delay_end = std::rotate(OutBuffer[i].begin(),
                buffer_end - FrontStablizer::DelayLength, buffer_end);
            std::swap_ranges(OutBuffer[i].begin(), delay_end, DelayBuf.begin());
        }
        else
        {
            /* Fewer samples than the delay length: swap what we have and
             * rotate the remainder within the delay buffer. */
            auto delay_start = std::swap_ranges(OutBuffer[i].begin(), buffer_end,
                DelayBuf.begin());
            std::rotate(DelayBuf.begin(), delay_start, DelayBuf.end());
        }
    }

    /* Include the side signal for what was just decoded. */
    for(size_t i{0};i < SamplesToDo;++i)
        side[FrontStablizer::DelayLength+i] += OutBuffer[lidx][i] - OutBuffer[ridx][i];

    /* Combine the delayed mid signal with the decoded mid signal. Note that
     * the samples are stored and combined in reverse, so the newest samples
     * are at the front and the oldest at the back.
     */
    al::span<float> tmpbuf{mStablizer->TempBuf.data(), SamplesToDo+FrontStablizer::DelayLength};
    auto tmpiter = tmpbuf.begin() + SamplesToDo;
    std::copy(mStablizer->MidDelay.cbegin(), mStablizer->MidDelay.cend(), tmpiter);
    for(size_t i{0};i < SamplesToDo;++i)
        *--tmpiter = OutBuffer[lidx][i] + OutBuffer[ridx][i];
    /* Save the newest samples for next time. */
    std::copy_n(tmpbuf.cbegin(), mStablizer->MidDelay.size(), mStablizer->MidDelay.begin());

    /* Apply an all-pass on the reversed signal, then reverse the samples to
     * get the forward signal with a reversed phase shift. The future samples
     * are included with the all-pass to reduce the error in the output
     * samples (the smaller the delay, the more error is introduced).
     */
    mStablizer->MidFilter.applyAllpass(tmpbuf);
    tmpbuf = tmpbuf.subspan<FrontStablizer::DelayLength>();
    std::reverse(tmpbuf.begin(), tmpbuf.end());

    /* Now apply the band-splitter, combining its phase shift with the reversed
     * phase shift, restoring the original phase on the split signal.
     */
    mStablizer->MidFilter.process(tmpbuf, mStablizer->MidHF.data(), mStablizer->MidLF.data());

    /* This pans the separate low- and high-frequency signals between being on
     * the center channel and the left+right channels. The low-frequency signal
     * is panned 1/3rd toward center and the high-frequency signal is panned
     * 1/4th toward center. These values can be tweaked.
     */
    const float cos_lf{std::cos(1.0f/3.0f * (al::MathDefs<float>::Pi()*0.5f))};
    const float cos_hf{std::cos(1.0f/4.0f * (al::MathDefs<float>::Pi()*0.5f))};
    const float sin_lf{std::sin(1.0f/3.0f * (al::MathDefs<float>::Pi()*0.5f))};
    const float sin_hf{std::sin(1.0f/4.0f * (al::MathDefs<float>::Pi()*0.5f))};
    for(size_t i{0};i < SamplesToDo;i++)
    {
        const float m{mStablizer->MidLF[i]*cos_lf + mStablizer->MidHF[i]*cos_hf + mid[i]};
        const float c{mStablizer->MidLF[i]*sin_lf + mStablizer->MidHF[i]*sin_hf};
        const float s{side[i]};

        /* The generated center channel signal adds to the existing signal,
         * while the modified left and right channels replace.
         */
        OutBuffer[lidx][i] = (m + s) * 0.5f;
        OutBuffer[ridx][i] = (m - s) * 0.5f;
        OutBuffer[cidx][i] += c * 0.5f;
    }

    /* Move the delayed mid/side samples to the front for next time. */
    auto mid_end = mStablizer->MidDirect.cbegin() + SamplesToDo;
    std::copy(mid_end, mid_end+FrontStablizer::DelayLength, mStablizer->MidDirect.begin());
    auto side_end = mStablizer->Side.cbegin() + SamplesToDo;
    std::copy(side_end, side_end+FrontStablizer::DelayLength, mStablizer->Side.begin());
}
/* Computes per-order HF gain adjustments for playing content of 'in_order'
 * through a decoder designed for 'out_order'. Entries past 'in_order' stay
 * zero. */
auto BFormatDec::GetHFOrderScales(const uint in_order, const uint out_order) noexcept
    -> std::array<float,MaxAmbiOrder+1>
{
    assert(out_order >= in_order);

    std::array<float,MaxAmbiOrder+1> ret{};
    const auto &target = GetDecoderHFScales(out_order);
    const auto &input = GetDecoderHFScales(in_order);

    std::transform(input.begin(), input.begin()+(in_order+1), target.begin(), ret.begin(),
        [](const float in, const float tgt) noexcept { return in / tgt; });
    return ret;
}
/* Factory: allocates a decoder from an ambdec configuration, sizing the
 * flexible channel-decoder array for 'inchans' input channels. */
std::unique_ptr<BFormatDec> BFormatDec::Create(const AmbDecConf *conf, const bool allow_2band,
    const size_t inchans, const uint srate, const uint (&chanmap)[MAX_OUTPUT_CHANNELS],
    std::unique_ptr<FrontStablizer> stablizer)
{
    return std::unique_ptr<BFormatDec>{new(FamCount(inchans))
        BFormatDec{conf, allow_2band, inchans, srate, chanmap, std::move(stablizer)}};
}
/* Factory: allocates a decoder from precomputed coefficient sets, sizing the
 * flexible channel-decoder array for 'inchans' input channels. */
std::unique_ptr<BFormatDec> BFormatDec::Create(const size_t inchans,
    const al::span<const ChannelDec> coeffs, const al::span<const ChannelDec> coeffslf,
    std::unique_ptr<FrontStablizer> stablizer)
{
    return std::unique_ptr<BFormatDec>{new(FamCount(inchans))
        BFormatDec{inchans, coeffs, coeffslf, std::move(stablizer)}};
}

View file

@ -1,57 +1,75 @@
#ifndef BFORMATDEC_H
#define BFORMATDEC_H
#include "alMain.h"
/* These are the necessary scales for first-order HF responses to play over
* higher-order 2D (non-periphonic) decoders.
*/
#define W_SCALE_2H0P 1.224744871f /* sqrt(1.5) */
#define XYZ_SCALE_2H0P 1.0f
#define W_SCALE_3H0P 1.414213562f /* sqrt(2) */
#define XYZ_SCALE_3H0P 1.082392196f
/* These are the necessary scales for first-order HF responses to play over
* higher-order 3D (periphonic) decoders.
*/
#define W_SCALE_2H2P 1.341640787f /* sqrt(1.8) */
#define XYZ_SCALE_2H2P 1.0f
#define W_SCALE_3H3P 1.695486018f
#define XYZ_SCALE_3H3P 1.136697713f
/* NOTE: These are scale factors as applied to Ambisonics content. Decoder
* coefficients should be divided by these values to get proper N3D scalings.
*/
const ALfloat N3D2N3DScale[MAX_AMBI_COEFFS];
const ALfloat SN3D2N3DScale[MAX_AMBI_COEFFS];
const ALfloat FuMa2N3DScale[MAX_AMBI_COEFFS];
#include <array>
#include <cstddef>
#include <memory>
#include "almalloc.h"
#include "alspan.h"
#include "core/ambidefs.h"
#include "core/bufferline.h"
#include "core/devformat.h"
#include "core/filters/splitter.h"
struct AmbDecConf;
struct BFormatDec;
struct AmbiUpsampler;
struct FrontStablizer;
struct BFormatDec *bformatdec_alloc();
void bformatdec_free(struct BFormatDec **dec);
void bformatdec_reset(struct BFormatDec *dec, const struct AmbDecConf *conf, ALsizei chancount, ALuint srate, const ALsizei chanmap[MAX_OUTPUT_CHANNELS]);
using ChannelDec = std::array<float,MaxAmbiChannels>;
/* Decodes the ambisonic input to the given output channels. */
/* NOTE(review): this span looks like a merge/diff-mangled interleave — the
 * C-style bformatdec_*/ambiup_* free-function declarations appear inside the
 * C++ BFormatDec class body, which cannot be the intended layout. Presumably
 * they are remnants of the old C API that should live outside (or be removed);
 * confirm against the upstream OpenAL Soft bformatdec.h before building.
 */
void bformatdec_process(struct BFormatDec *dec, ALfloat (*restrict OutBuffer)[BUFFERSIZE], ALsizei OutChannels, const ALfloat (*restrict InSamples)[BUFFERSIZE], ALsizei SamplesToDo);
/* Ambisonic-to-speaker-feed decoder, optionally dual-band (separate low/high
 * frequency decode matrices split by a band filter).
 */
class BFormatDec {
/* Indices for the two frequency bands of the dual-band decode matrix. */
static constexpr size_t sHFBand{0};
static constexpr size_t sLFBand{1};
static constexpr size_t sNumBands{2};
/* Up-samples a first-order input to the decoder's configuration. */
void bformatdec_upSample(struct BFormatDec *dec, ALfloat (*restrict OutBuffer)[BUFFERSIZE], const ALfloat (*restrict InSamples)[BUFFERSIZE], ALsizei InChannels, ALsizei SamplesToDo);
/* Per-output-channel decode state: gain matrix plus band-split filter. */
struct ChannelDecoder {
union MatrixU {
/* Dual-band: one gain row per band; Single: one full-range gain row. */
float Dual[sNumBands][MAX_OUTPUT_CHANNELS];
float Single[MAX_OUTPUT_CHANNELS];
} mGains{};
/* NOTE: BandSplitter filter is unused with single-band decoding. */
BandSplitter mXOver;
};
/* Stand-alone first-order upsampler. Kept here because it shares some stuff
 * with bformatdec. Assumes a periphonic (4-channel) input mix!
 */
struct AmbiUpsampler *ambiup_alloc();
void ambiup_free(struct AmbiUpsampler **ambiup);
void ambiup_reset(struct AmbiUpsampler *ambiup, const ALCdevice *device, ALfloat w_scale, ALfloat xyz_scale);
/* Scratch buffers for the band-split intermediate signals. */
alignas(16) std::array<FloatBufferLine,2> mSamples;
void ambiup_process(struct AmbiUpsampler *ambiup, ALfloat (*restrict OutBuffer)[BUFFERSIZE], ALsizei OutChannels, const ALfloat (*restrict InSamples)[BUFFERSIZE], ALsizei SamplesToDo);
/* Optional front-image stablizer; null when not in use (see hasStablizer). */
const std::unique_ptr<FrontStablizer> mStablizer;
const bool mDualBand{false};
/* One decoder entry per output channel (flexible array, see DEF_FAM_NEWDEL). */
al::FlexArray<ChannelDecoder> mChannelDec;
public:
BFormatDec(const AmbDecConf *conf, const bool allow_2band, const size_t inchans,
const uint srate, const uint (&chanmap)[MAX_OUTPUT_CHANNELS],
std::unique_ptr<FrontStablizer> stablizer);
BFormatDec(const size_t inchans, const al::span<const ChannelDec> coeffs,
const al::span<const ChannelDec> coeffslf, std::unique_ptr<FrontStablizer> stablizer);
bool hasStablizer() const noexcept { return mStablizer != nullptr; };
/* Decodes the ambisonic input to the given output channels. */
void process(const al::span<FloatBufferLine> OutBuffer, const FloatBufferLine *InSamples,
const size_t SamplesToDo);
/* Decodes the ambisonic input to the given output channels with stablization. */
void processStablize(const al::span<FloatBufferLine> OutBuffer,
const FloatBufferLine *InSamples, const size_t lidx, const size_t ridx, const size_t cidx,
const size_t SamplesToDo);
/* Retrieves per-order HF scaling factors for "upsampling" ambisonic data. */
static std::array<float,MaxAmbiOrder+1> GetHFOrderScales(const uint in_order,
const uint out_order) noexcept;
static std::unique_ptr<BFormatDec> Create(const AmbDecConf *conf, const bool allow_2band,
const size_t inchans, const uint srate, const uint (&chanmap)[MAX_OUTPUT_CHANNELS],
std::unique_ptr<FrontStablizer> stablizer);
static std::unique_ptr<BFormatDec> Create(const size_t inchans,
const al::span<const ChannelDec> coeffs, const al::span<const ChannelDec> coeffslf,
std::unique_ptr<FrontStablizer> stablizer);
DEF_FAM_NEWDEL(BFormatDec, mChannelDec)
};
#endif /* BFORMATDEC_H */

View file

@ -0,0 +1,38 @@
#include "config.h"
#include "buffer_storage.h"
#include <cstdint>
/* Returns the storage size, in bytes, of one sample of the given format.
 * Returns 0 for an unrecognized format value.
 */
uint BytesFromFmt(FmtType type) noexcept
{
    switch(type)
    {
    /* All the 8-bit formats (including the companded ones) store one byte
     * per sample.
     */
    case FmtUByte:
    case FmtMulaw:
    case FmtAlaw:
        return sizeof(uint8_t);
    case FmtShort:
        return sizeof(int16_t);
    case FmtFloat:
        return sizeof(float);
    case FmtDouble:
        return sizeof(double);
    }
    return 0;
}
/* Returns the channel count for the given channel configuration. For the
 * B-Format (ambisonic) configurations the count depends on the ambisonic
 * order; the order is ignored for the discrete speaker layouts. Returns 0
 * for an unrecognized configuration.
 */
uint ChannelsFromFmt(FmtChannels chans, uint ambiorder) noexcept
{
    switch(chans)
    {
    case FmtMono:
        return 1;
    case FmtStereo:
    case FmtRear:
        return 2;
    case FmtQuad:
        return 4;
    case FmtX51:
        return 6;
    case FmtX61:
        return 7;
    case FmtX71:
        return 8;
    case FmtBFormat2D:
        /* 2D/horizontal-only: two channels per order, plus W. */
        return 2u*ambiorder + 1u;
    case FmtBFormat3D:
        /* Full 3D: (order+1)^2 channels. */
        return (ambiorder+1u) * (ambiorder+1u);
    }
    return 0;
}

View file

@ -0,0 +1,72 @@
#ifndef ALC_BUFFER_STORAGE_H
#define ALC_BUFFER_STORAGE_H
#include <atomic>
#include "albyte.h"
using uint = unsigned int;
/* Storable formats */
/* Storable sample types for buffer data. */
enum FmtType : unsigned char {
FmtUByte,
FmtShort,
FmtFloat,
FmtDouble,
/* Companded 8-bit formats (mu-law / A-law). */
FmtMulaw,
FmtAlaw,
};
/* Storable channel configurations. */
enum FmtChannels : unsigned char {
FmtMono,
FmtStereo,
FmtRear,
FmtQuad,
FmtX51, /* (WFX order) */
FmtX61, /* (WFX order) */
FmtX71, /* (WFX order) */
/* Ambisonic configurations; the channel count additionally depends on the
 * ambisonic order (see ChannelsFromFmt).
 */
FmtBFormat2D,
FmtBFormat3D,
};
/* Ambisonic channel ordering convention: Furse-Malham or ACN. */
enum class AmbiLayout : unsigned char {
FuMa,
ACN,
};
/* Ambisonic channel normalization/scaling convention. */
enum class AmbiScaling : unsigned char {
FuMa,
SN3D,
N3D,
};
uint BytesFromFmt(FmtType type) noexcept;
uint ChannelsFromFmt(FmtChannels chans, uint ambiorder) noexcept;
inline uint FrameSizeFromFmt(FmtChannels chans, FmtType type, uint ambiorder) noexcept
{ return ChannelsFromFmt(chans, ambiorder) * BytesFromFmt(type); }
using CallbackType = int(*)(void*, void*, int);
/* Format metadata and bookkeeping for a buffer's sample data. */
struct BufferStorage {
/* Streaming callback and its user pointer; null for static buffers.
 * NOTE(review): the callback contract isn't visible here — confirm at the
 * call site.
 */
CallbackType mCallback{nullptr};
void *mUserData{nullptr};
/* Sample rate the data is stored at. */
uint mSampleRate{0u};
FmtChannels mChannels{FmtMono};
FmtType mType{FmtShort};
/* Data length; presumably in sample frames — confirm against users. */
uint mSampleLen{0u};
/* Ambisonic metadata; only meaningful when isBFormat() is true. */
AmbiLayout mAmbiLayout{AmbiLayout::FuMa};
AmbiScaling mAmbiScaling{AmbiScaling::FuMa};
uint mAmbiOrder{0u};
/* Size in bytes of one sample of this buffer's type. */
inline uint bytesFromFmt() const noexcept { return BytesFromFmt(mType); }
/* Channel count, accounting for the ambisonic order. */
inline uint channelsFromFmt() const noexcept
{ return ChannelsFromFmt(mChannels, mAmbiOrder); }
/* Size in bytes of one full sample frame. */
inline uint frameSizeFromFmt() const noexcept { return channelsFromFmt() * bytesFromFmt(); }
inline bool isBFormat() const noexcept
{ return mChannels == FmtBFormat2D || mChannels == FmtBFormat3D; }
};
#endif /* ALC_BUFFER_STORAGE_H */

View file

@ -1,65 +1,9 @@
#ifndef AL_COMPAT_H
#define AL_COMPAT_H
#include "alstring.h"
#include <string>
#ifdef __cplusplus
extern "C" {
#endif
#ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
WCHAR *strdupW(const WCHAR *str);
/* Opens a file with standard I/O. The filename is expected to be UTF-8. */
FILE *al_fopen(const char *fname, const char *mode);
#define HAVE_DYNLOAD 1
#else
#define al_fopen fopen
#if defined(HAVE_DLFCN_H) && !defined(IN_IDE_PARSER)
#define HAVE_DYNLOAD 1
#endif
#endif
/* A memory-mapped file. On Windows the mapping is backed by a file handle
 * and a file-mapping handle; on other systems by a file descriptor. `ptr`
 * and `len` describe the mapped region.
 */
struct FileMapping {
#ifdef _WIN32
HANDLE file;
HANDLE fmap;
#else
int fd;
#endif
void *ptr;
size_t len;
};
struct FileMapping MapFileToMem(const char *fname);
void UnmapFileMem(const struct FileMapping *mapping);
void GetProcBinary(al_string *path, al_string *fname);
#ifdef HAVE_DYNLOAD
void *LoadLib(const char *name);
void CloseLib(void *handle);
void *GetSymbol(void *handle, const char *name);
#endif
#ifdef __ANDROID__
#define JCALL(obj, func) ((*(obj))->func((obj), EXTRACT_VCALL_ARGS
#define JCALL0(obj, func) ((*(obj))->func((obj) EXTRACT_VCALL_ARGS
/** Returns a JNIEnv*. */
void *Android_GetJNIEnv(void);
#endif
#ifdef __cplusplus
} /* extern "C" */
#endif
struct PathNamePair { std::string path, fname; };
const PathNamePair &GetProcBinary(void);
#endif /* AL_COMPAT_H */

View file

@ -1,468 +0,0 @@
#include "config.h"
#include "converter.h"
#include "fpu_modes.h"
#include "mixer/defs.h"
/* Allocates and initializes a sample-rate/type converter for `numchans`
 * interleaved channels, converting srcType@srcRate to dstType@dstRate.
 * Returns NULL if any count/rate is non-positive. Free with
 * DestroySampleConverter().
 */
SampleConverter *CreateSampleConverter(enum DevFmtType srcType, enum DevFmtType dstType, ALsizei numchans, ALsizei srcRate, ALsizei dstRate)
{
SampleConverter *converter;
ALsizei step;
if(numchans <= 0 || srcRate <= 0 || dstRate <= 0)
return NULL;
/* Flexible-array allocation: one Chan[] entry per channel, 16-byte aligned
 * for the SIMD sample buffers.
 * NOTE(review): the al_calloc result is not checked for NULL before use.
 */
converter = al_calloc(16, FAM_SIZE(SampleConverter, Chan, numchans));
converter->mSrcType = srcType;
converter->mDstType = dstType;
converter->mNumChannels = numchans;
converter->mSrcTypeSize = BytesFromDevFmt(srcType);
converter->mDstTypeSize = BytesFromDevFmt(dstType);
converter->mSrcPrepCount = 0;
converter->mFracOffset = 0;
/* Have to set the mixer FPU mode since that's what the resampler code expects. */
START_MIXER_MODE();
/* Fixed-point increment: src/dst rate ratio in FRACTIONBITS precision,
 * rounded to nearest and capped at the maximum supported pitch.
 */
step = (ALsizei)mind(((ALdouble)srcRate/dstRate*FRACTIONONE) + 0.5,
MAX_PITCH * FRACTIONONE);
converter->mIncrement = maxi(step, 1);
if(converter->mIncrement == FRACTIONONE)
converter->mResample = Resample_copy_C;
else
{
/* TODO: Allow other resamplers. */
BsincPrepare(converter->mIncrement, &converter->mState.bsinc, &bsinc12);
converter->mResample = SelectResampler(BSinc12Resampler);
}
END_MIXER_MODE();
return converter;
}
/* Frees a converter created by CreateSampleConverter() and clears the
 * caller's pointer. A NULL argument is a no-op; a pointer to NULL is safe.
 */
void DestroySampleConverter(SampleConverter **converter)
{
    if(!converter)
        return;
    al_free(*converter);
    *converter = NULL;
}
/* Sample_<T>: normalizes one sample of integer type T to a float in roughly
 * [-1, +1). Unsigned types are rebiased to signed first.
 */
static inline ALfloat Sample_ALbyte(ALbyte val)
{ return val * (1.0f/128.0f); }
static inline ALfloat Sample_ALubyte(ALubyte val)
{ return Sample_ALbyte((ALint)val - 128); }
static inline ALfloat Sample_ALshort(ALshort val)
{ return val * (1.0f/32768.0f); }
static inline ALfloat Sample_ALushort(ALushort val)
{ return Sample_ALshort((ALint)val - 32768); }
/* 32-bit ints drop the low 7 bits so the value fits float's 24-bit mantissa
 * without rounding surprises.
 */
static inline ALfloat Sample_ALint(ALint val)
{ return (val>>7) * (1.0f/16777216.0f); }
static inline ALfloat Sample_ALuint(ALuint val)
{ return Sample_ALint(val - INT_MAX - 1); }
static inline ALfloat Sample_ALfloat(ALfloat val)
{ return val; }
/* Load_<T>: converts `samples` samples from src (reading every srcstep-th
 * element, for interleaved input) into the contiguous float array dst.
 */
#define DECL_TEMPLATE(T) \
static inline void Load_##T(ALfloat *restrict dst, const T *restrict src, \
ALint srcstep, ALsizei samples) \
{ \
ALsizei i; \
for(i = 0;i < samples;i++) \
dst[i] = Sample_##T(src[i*srcstep]); \
}
DECL_TEMPLATE(ALbyte)
DECL_TEMPLATE(ALubyte)
DECL_TEMPLATE(ALshort)
DECL_TEMPLATE(ALushort)
DECL_TEMPLATE(ALint)
DECL_TEMPLATE(ALuint)
DECL_TEMPLATE(ALfloat)
#undef DECL_TEMPLATE
/* Dispatches to the Load_<T> helper matching the runtime source format,
 * converting `samples` strided samples to floats in dst.
 */
static void LoadSamples(ALfloat *dst, const ALvoid *src, ALint srcstep, enum DevFmtType srctype, ALsizei samples)
{
switch(srctype)
{
case DevFmtByte:
Load_ALbyte(dst, src, srcstep, samples);
break;
case DevFmtUByte:
Load_ALubyte(dst, src, srcstep, samples);
break;
case DevFmtShort:
Load_ALshort(dst, src, srcstep, samples);
break;
case DevFmtUShort:
Load_ALushort(dst, src, srcstep, samples);
break;
case DevFmtInt:
Load_ALint(dst, src, srcstep, samples);
break;
case DevFmtUInt:
Load_ALuint(dst, src, srcstep, samples);
break;
case DevFmtFloat:
Load_ALfloat(dst, src, srcstep, samples);
break;
}
}
/* <T>_Sample: converts a normalized float back to integer type T, clamping
 * to the representable range. Unsigned types rebias the signed result.
 */
static inline ALbyte ALbyte_Sample(ALfloat val)
{ return fastf2i(clampf(val*128.0f, -128.0f, 127.0f)); }
static inline ALubyte ALubyte_Sample(ALfloat val)
{ return ALbyte_Sample(val)+128; }
static inline ALshort ALshort_Sample(ALfloat val)
{ return fastf2i(clampf(val*32768.0f, -32768.0f, 32767.0f)); }
static inline ALushort ALushort_Sample(ALfloat val)
{ return ALshort_Sample(val)+32768; }
/* Converts via a 25-bit intermediate then shifts up, mirroring the >>7 in
 * Sample_ALint.
 */
static inline ALint ALint_Sample(ALfloat val)
{ return fastf2i(clampf(val*16777216.0f, -16777216.0f, 16777215.0f)) << 7; }
static inline ALuint ALuint_Sample(ALfloat val)
{ return ALint_Sample(val)+INT_MAX+1; }
static inline ALfloat ALfloat_Sample(ALfloat val)
{ return val; }
/* Store_<T>: writes `samples` floats from src into dst as type T, writing
 * every dststep-th element (for interleaved output).
 */
#define DECL_TEMPLATE(T) \
static inline void Store_##T(T *restrict dst, const ALfloat *restrict src, \
ALint dststep, ALsizei samples) \
{ \
ALsizei i; \
for(i = 0;i < samples;i++) \
dst[i*dststep] = T##_Sample(src[i]); \
}
DECL_TEMPLATE(ALbyte)
DECL_TEMPLATE(ALubyte)
DECL_TEMPLATE(ALshort)
DECL_TEMPLATE(ALushort)
DECL_TEMPLATE(ALint)
DECL_TEMPLATE(ALuint)
DECL_TEMPLATE(ALfloat)
#undef DECL_TEMPLATE
/* Dispatches to the Store_<T> helper matching the runtime destination
 * format, writing `samples` floats from src into dst with the given stride.
 */
static void StoreSamples(ALvoid *dst, const ALfloat *src, ALint dststep, enum DevFmtType dsttype, ALsizei samples)
{
switch(dsttype)
{
case DevFmtByte:
Store_ALbyte(dst, src, dststep, samples);
break;
case DevFmtUByte:
Store_ALubyte(dst, src, dststep, samples);
break;
case DevFmtShort:
Store_ALshort(dst, src, dststep, samples);
break;
case DevFmtUShort:
Store_ALushort(dst, src, dststep, samples);
break;
case DevFmtInt:
Store_ALint(dst, src, dststep, samples);
break;
case DevFmtUInt:
Store_ALuint(dst, src, dststep, samples);
break;
case DevFmtFloat:
Store_ALfloat(dst, src, dststep, samples);
break;
}
}
/* Returns how many destination frames SampleConverterInput() could produce
 * given `srcframes` more input frames, without modifying the converter.
 * Mirrors the fixed-point bookkeeping in SampleConverterInput.
 */
ALsizei SampleConverterAvailableOut(SampleConverter *converter, ALsizei srcframes)
{
ALint prepcount = converter->mSrcPrepCount;
ALsizei increment = converter->mIncrement;
ALsizei DataPosFrac = converter->mFracOffset;
ALuint64 DataSize64;
if(prepcount < 0)
{
/* Negative prepcount means we need to skip that many input samples. */
if(-prepcount >= srcframes)
return 0;
srcframes += prepcount;
prepcount = 0;
}
if(srcframes < 1)
{
/* No output samples if there's no input samples. */
return 0;
}
if(prepcount < MAX_RESAMPLE_PADDING*2 &&
MAX_RESAMPLE_PADDING*2 - prepcount >= srcframes)
{
/* Not enough input samples to generate an output sample. */
return 0;
}
/* 64-bit intermediate: (prep+src-padding) frames in FRACTIONBITS fixed
 * point, minus the current fractional offset.
 */
DataSize64 = prepcount;
DataSize64 += srcframes;
DataSize64 -= MAX_RESAMPLE_PADDING*2;
DataSize64 <<= FRACTIONBITS;
DataSize64 -= DataPosFrac;
/* If we have a full prep, we can generate at least one sample. */
return (ALsizei)clampu64((DataSize64 + increment-1)/increment, 1, BUFFERSIZE);
}
/* Converts up to `dstframes` frames from *src into dst, resampling and
 * retyping as configured. Advances *src and decrements *srcframes by the
 * input actually consumed (leftover input is buffered in Chan[].mPrevSamples
 * for the next call). Returns the number of destination frames written.
 */
ALsizei SampleConverterInput(SampleConverter *converter, const ALvoid **src, ALsizei *srcframes, ALvoid *dst, ALsizei dstframes)
{
const ALsizei SrcFrameSize = converter->mNumChannels * converter->mSrcTypeSize;
const ALsizei DstFrameSize = converter->mNumChannels * converter->mDstTypeSize;
const ALsizei increment = converter->mIncrement;
ALsizei pos = 0;
/* Resampler code expects the mixer FPU mode (see CreateSampleConverter). */
START_MIXER_MODE();
while(pos < dstframes && *srcframes > 0)
{
ALfloat *restrict SrcData = ASSUME_ALIGNED(converter->mSrcSamples, 16);
ALfloat *restrict DstData = ASSUME_ALIGNED(converter->mDstSamples, 16);
ALint prepcount = converter->mSrcPrepCount;
ALsizei DataPosFrac = converter->mFracOffset;
ALuint64 DataSize64;
ALsizei DstSize;
ALint toread;
ALsizei chan;
if(prepcount < 0)
{
/* Negative prepcount means we need to skip that many input samples. */
if(-prepcount >= *srcframes)
{
converter->mSrcPrepCount = prepcount + *srcframes;
*srcframes = 0;
break;
}
*src = (const ALbyte*)*src + SrcFrameSize*-prepcount;
*srcframes += prepcount;
converter->mSrcPrepCount = 0;
continue;
}
toread = mini(*srcframes, BUFFERSIZE - MAX_RESAMPLE_PADDING*2);
if(prepcount < MAX_RESAMPLE_PADDING*2 &&
MAX_RESAMPLE_PADDING*2 - prepcount >= toread)
{
/* Not enough input samples to generate an output sample. Store
 * what we're given for later.
 */
for(chan = 0;chan < converter->mNumChannels;chan++)
LoadSamples(&converter->Chan[chan].mPrevSamples[prepcount],
(const ALbyte*)*src + converter->mSrcTypeSize*chan,
converter->mNumChannels, converter->mSrcType, toread
);
converter->mSrcPrepCount = prepcount + toread;
*srcframes = 0;
break;
}
/* Output frames producible from prep + new input, in fixed point (same
 * formula as SampleConverterAvailableOut).
 */
DataSize64 = prepcount;
DataSize64 += toread;
DataSize64 -= MAX_RESAMPLE_PADDING*2;
DataSize64 <<= FRACTIONBITS;
DataSize64 -= DataPosFrac;
/* If we have a full prep, we can generate at least one sample. */
DstSize = (ALsizei)clampu64((DataSize64 + increment-1)/increment, 1, BUFFERSIZE);
DstSize = mini(DstSize, dstframes-pos);
for(chan = 0;chan < converter->mNumChannels;chan++)
{
const ALbyte *SrcSamples = (const ALbyte*)*src + converter->mSrcTypeSize*chan;
ALbyte *DstSamples = (ALbyte*)dst + converter->mDstTypeSize*chan;
const ALfloat *ResampledData;
ALsizei SrcDataEnd;
/* Load the previous samples into the source data first, then the
 * new samples from the input buffer.
 */
memcpy(SrcData, converter->Chan[chan].mPrevSamples,
prepcount*sizeof(ALfloat));
LoadSamples(SrcData + prepcount, SrcSamples,
converter->mNumChannels, converter->mSrcType, toread
);
/* Store as many prep samples for next time as possible, given the
 * number of output samples being generated.
 */
SrcDataEnd = (DataPosFrac + increment*DstSize)>>FRACTIONBITS;
if(SrcDataEnd >= prepcount+toread)
memset(converter->Chan[chan].mPrevSamples, 0,
sizeof(converter->Chan[chan].mPrevSamples));
else
{
size_t len = mini(MAX_RESAMPLE_PADDING*2, prepcount+toread-SrcDataEnd);
memcpy(converter->Chan[chan].mPrevSamples, &SrcData[SrcDataEnd],
len*sizeof(ALfloat));
memset(converter->Chan[chan].mPrevSamples+len, 0,
sizeof(converter->Chan[chan].mPrevSamples) - len*sizeof(ALfloat));
}
/* Now resample, and store the result in the output buffer. */
ResampledData = converter->mResample(&converter->mState,
SrcData+MAX_RESAMPLE_PADDING, DataPosFrac, increment,
DstData, DstSize
);
StoreSamples(DstSamples, ResampledData, converter->mNumChannels,
converter->mDstType, DstSize);
}
/* Update the number of prep samples still available, as well as the
 * fractional offset.
 */
DataPosFrac += increment*DstSize;
converter->mSrcPrepCount = mini(prepcount + toread - (DataPosFrac>>FRACTIONBITS),
MAX_RESAMPLE_PADDING*2);
converter->mFracOffset = DataPosFrac & FRACTIONMASK;
/* Update the src and dst pointers in case there's still more to do. */
*src = (const ALbyte*)*src + SrcFrameSize*(DataPosFrac>>FRACTIONBITS);
*srcframes -= mini(*srcframes, (DataPosFrac>>FRACTIONBITS));
dst = (ALbyte*)dst + DstFrameSize*DstSize;
pos += DstSize;
}
END_MIXER_MODE();
return pos;
}
/* Allocates a channel-layout converter. Only identity, mono->stereo, and
 * stereo->mono conversions are supported; anything else returns NULL.
 * NOTE(review): the al_calloc result is not checked for NULL.
 */
ChannelConverter *CreateChannelConverter(enum DevFmtType srcType, enum DevFmtChannels srcChans, enum DevFmtChannels dstChans)
{
ChannelConverter *converter;
if(srcChans != dstChans && !((srcChans == DevFmtMono && dstChans == DevFmtStereo) ||
(srcChans == DevFmtStereo && dstChans == DevFmtMono)))
return NULL;
converter = al_calloc(DEF_ALIGN, sizeof(*converter));
converter->mSrcType = srcType;
converter->mSrcChans = srcChans;
converter->mDstChans = dstChans;
return converter;
}
/* Frees a channel converter and clears the caller's pointer; NULL-safe. */
void DestroyChannelConverter(ChannelConverter **converter)
{
if(converter)
{
al_free(*converter);
*converter = NULL;
}
}
/* Mono2Stereo<T>: duplicates each mono sample to both stereo outputs;
 * Stereo2Mono<T>: sums each stereo pair. Both scale by 0.707106781187
 * (1/sqrt(2), i.e. -3dB) to preserve apparent loudness.
 */
#define DECL_TEMPLATE(T) \
static void Mono2Stereo##T(ALfloat *restrict dst, const T *src, ALsizei frames)\
{ \
ALsizei i; \
for(i = 0;i < frames;i++) \
dst[i*2 + 1] = dst[i*2 + 0] = Sample_##T(src[i]) * 0.707106781187f; \
} \
\
static void Stereo2Mono##T(ALfloat *restrict dst, const T *src, ALsizei frames)\
{ \
ALsizei i; \
for(i = 0;i < frames;i++) \
dst[i] = (Sample_##T(src[i*2 + 0])+Sample_##T(src[i*2 + 1])) * \
0.707106781187f; \
}
DECL_TEMPLATE(ALbyte)
DECL_TEMPLATE(ALubyte)
DECL_TEMPLATE(ALshort)
DECL_TEMPLATE(ALushort)
DECL_TEMPLATE(ALint)
DECL_TEMPLATE(ALuint)
DECL_TEMPLATE(ALfloat)
#undef DECL_TEMPLATE
/* Converts `frames` frames from src to float in dst, remapping channels as
 * configured: identity layouts just load/convert, stereo->mono downmixes,
 * and the remaining supported case (mono->stereo) upmixes.
 */
void ChannelConverterInput(ChannelConverter *converter, const ALvoid *src, ALfloat *dst, ALsizei frames)
{
if(converter->mSrcChans == converter->mDstChans)
{
LoadSamples(dst, src, 1, converter->mSrcType,
frames*ChannelsFromDevFmt(converter->mSrcChans, 0));
return;
}
if(converter->mSrcChans == DevFmtStereo && converter->mDstChans == DevFmtMono)
{
switch(converter->mSrcType)
{
case DevFmtByte:
Stereo2MonoALbyte(dst, src, frames);
break;
case DevFmtUByte:
Stereo2MonoALubyte(dst, src, frames);
break;
case DevFmtShort:
Stereo2MonoALshort(dst, src, frames);
break;
case DevFmtUShort:
Stereo2MonoALushort(dst, src, frames);
break;
case DevFmtInt:
Stereo2MonoALint(dst, src, frames);
break;
case DevFmtUInt:
Stereo2MonoALuint(dst, src, frames);
break;
case DevFmtFloat:
Stereo2MonoALfloat(dst, src, frames);
break;
}
}
else /*if(converter->mSrcChans == DevFmtMono && converter->mDstChans == DevFmtStereo)*/
{
switch(converter->mSrcType)
{
case DevFmtByte:
Mono2StereoALbyte(dst, src, frames);
break;
case DevFmtUByte:
Mono2StereoALubyte(dst, src, frames);
break;
case DevFmtShort:
Mono2StereoALshort(dst, src, frames);
break;
case DevFmtUShort:
Mono2StereoALushort(dst, src, frames);
break;
case DevFmtInt:
Mono2StereoALint(dst, src, frames);
break;
case DevFmtUInt:
Mono2StereoALuint(dst, src, frames);
break;
case DevFmtFloat:
Mono2StereoALfloat(dst, src, frames);
break;
}
}
}

View file

@ -0,0 +1,371 @@
#include "config.h"
#include "converter.h"
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <iterator>
#include <limits.h>
#include "albit.h"
#include "albyte.h"
#include "alnumeric.h"
#include "core/fpu_ctrl.h"
struct CTag;
struct CopyTag;
namespace {
/* Maximum supported resampling pitch (src/dst rate ratio). */
constexpr uint MaxPitch{10};
static_assert((BufferLineSize-1)/MaxPitch > 0, "MaxPitch is too large for BufferLineSize!");
static_assert((INT_MAX>>MixerFracBits)/MaxPitch > BufferLineSize,
"MaxPitch and/or BufferLineSize are too large for MixerFracBits!");
/* Base template left undefined. Should be marked =delete, but Clang 3.8.1
 * chokes on that given the inline specializations.
 */
template<DevFmtType T>
inline float LoadSample(DevFmtType_t<T> val) noexcept;
/* Signed integer types normalize to roughly [-1, +1). */
template<> inline float LoadSample<DevFmtByte>(DevFmtType_t<DevFmtByte> val) noexcept
{ return val * (1.0f/128.0f); }
template<> inline float LoadSample<DevFmtShort>(DevFmtType_t<DevFmtShort> val) noexcept
{ return val * (1.0f/32768.0f); }
template<> inline float LoadSample<DevFmtInt>(DevFmtType_t<DevFmtInt> val) noexcept
{ return static_cast<float>(val) * (1.0f/2147483648.0f); }
template<> inline float LoadSample<DevFmtFloat>(DevFmtType_t<DevFmtFloat> val) noexcept
{ return val; }
/* Unsigned types are rebiased to signed, then normalized as above. */
template<> inline float LoadSample<DevFmtUByte>(DevFmtType_t<DevFmtUByte> val) noexcept
{ return LoadSample<DevFmtByte>(static_cast<int8_t>(val - 128)); }
template<> inline float LoadSample<DevFmtUShort>(DevFmtType_t<DevFmtUShort> val) noexcept
{ return LoadSample<DevFmtShort>(static_cast<int16_t>(val - 32768)); }
template<> inline float LoadSample<DevFmtUInt>(DevFmtType_t<DevFmtUInt> val) noexcept
{ return LoadSample<DevFmtInt>(static_cast<int32_t>(val - 2147483648u)); }
template<DevFmtType T>
inline void LoadSampleArray(float *RESTRICT dst, const void *src, const size_t srcstep,
const size_t samples) noexcept
{
const DevFmtType_t<T> *ssrc = static_cast<const DevFmtType_t<T>*>(src);
for(size_t i{0u};i < samples;i++)
dst[i] = LoadSample<T>(ssrc[i*srcstep]);
}
/* Runtime dispatch to the LoadSampleArray<T> instantiation matching the
 * source sample type.
 */
void LoadSamples(float *dst, const void *src, const size_t srcstep, const DevFmtType srctype,
const size_t samples) noexcept
{
#define HANDLE_FMT(T) \
case T: LoadSampleArray<T>(dst, src, srcstep, samples); break
switch(srctype)
{
HANDLE_FMT(DevFmtByte);
HANDLE_FMT(DevFmtUByte);
HANDLE_FMT(DevFmtShort);
HANDLE_FMT(DevFmtUShort);
HANDLE_FMT(DevFmtInt);
HANDLE_FMT(DevFmtUInt);
HANDLE_FMT(DevFmtFloat);
}
#undef HANDLE_FMT
}
/* StoreSample<T>: converts a normalized float back to sample type T,
 * clamping to the representable range. Base template left undefined, same
 * as LoadSample.
 */
template<DevFmtType T>
inline DevFmtType_t<T> StoreSample(float) noexcept;
template<> inline float StoreSample<DevFmtFloat>(float val) noexcept
{ return val; }
/* 2147483520 is the largest float value not exceeding INT_MAX, avoiding
 * overflow from float rounding at the top of the range.
 */
template<> inline int32_t StoreSample<DevFmtInt>(float val) noexcept
{ return fastf2i(clampf(val*2147483648.0f, -2147483648.0f, 2147483520.0f)); }
template<> inline int16_t StoreSample<DevFmtShort>(float val) noexcept
{ return static_cast<int16_t>(fastf2i(clampf(val*32768.0f, -32768.0f, 32767.0f))); }
template<> inline int8_t StoreSample<DevFmtByte>(float val) noexcept
{ return static_cast<int8_t>(fastf2i(clampf(val*128.0f, -128.0f, 127.0f))); }
/* Define unsigned output variations. */
template<> inline uint32_t StoreSample<DevFmtUInt>(float val) noexcept
{ return static_cast<uint32_t>(StoreSample<DevFmtInt>(val)) + 2147483648u; }
template<> inline uint16_t StoreSample<DevFmtUShort>(float val) noexcept
{ return static_cast<uint16_t>(StoreSample<DevFmtShort>(val) + 32768); }
template<> inline uint8_t StoreSample<DevFmtUByte>(float val) noexcept
{ return static_cast<uint8_t>(StoreSample<DevFmtByte>(val) + 128); }
/* Writes `samples` floats from src into dst as sample type T, storing to
 * every dststep-th element (for interleaved output).
 */
template<DevFmtType T>
inline void StoreSampleArray(void *dst, const float *RESTRICT src, const size_t dststep,
    const size_t samples) noexcept
{
    auto output = static_cast<DevFmtType_t<T>*>(dst);
    size_t idx{0u};
    while(idx < samples)
    {
        output[idx*dststep] = StoreSample<T>(src[idx]);
        ++idx;
    }
}
/* Runtime dispatch to the StoreSampleArray<T> instantiation matching the
 * destination sample type.
 */
void StoreSamples(void *dst, const float *src, const size_t dststep, const DevFmtType dsttype,
const size_t samples) noexcept
{
#define HANDLE_FMT(T) \
case T: StoreSampleArray<T>(dst, src, dststep, samples); break
switch(dsttype)
{
HANDLE_FMT(DevFmtByte);
HANDLE_FMT(DevFmtUByte);
HANDLE_FMT(DevFmtShort);
HANDLE_FMT(DevFmtUShort);
HANDLE_FMT(DevFmtInt);
HANDLE_FMT(DevFmtUInt);
HANDLE_FMT(DevFmtFloat);
}
#undef HANDLE_FMT
}
template<DevFmtType T>
void Mono2Stereo(float *RESTRICT dst, const void *src, const size_t frames) noexcept
{
const DevFmtType_t<T> *ssrc = static_cast<const DevFmtType_t<T>*>(src);
for(size_t i{0u};i < frames;i++)
dst[i*2 + 1] = dst[i*2 + 0] = LoadSample<T>(ssrc[i]) * 0.707106781187f;
}
/* Downmixes the channels flagged in `chanmask` (bit i = channel i of a
 * `step`-channel interleaved stream) to mono, summing them and applying
 * `scale` to the result.
 */
template<DevFmtType T>
void Multi2Mono(uint chanmask, const size_t step, const float scale, float *RESTRICT dst,
const void *src, const size_t frames) noexcept
{
const DevFmtType_t<T> *ssrc = static_cast<const DevFmtType_t<T>*>(src);
std::fill_n(dst, frames, 0.0f);
/* Walk the mask bit by bit, accumulating each flagged channel. */
for(size_t c{0};chanmask;++c)
{
if LIKELY((chanmask&1))
{
for(size_t i{0u};i < frames;i++)
dst[i] += LoadSample<T>(ssrc[i*step + c]);
}
chanmask >>= 1;
}
/* Scale applied once at the end rather than per-channel. */
for(size_t i{0u};i < frames;i++)
dst[i] *= scale;
}
} // namespace
/* Creates a sample-rate/type converter for `numchans` interleaved channels
 * using the given resampler. Returns null if any count/rate is zero.
 */
SampleConverterPtr CreateSampleConverter(DevFmtType srcType, DevFmtType dstType, size_t numchans,
uint srcRate, uint dstRate, Resampler resampler)
{
if(numchans < 1 || srcRate < 1 || dstRate < 1)
return nullptr;
/* FamCount sizes the flexible mChan array for numchans entries. */
SampleConverterPtr converter{new(FamCount(numchans)) SampleConverter{numchans}};
converter->mSrcType = srcType;
converter->mDstType = dstType;
converter->mSrcTypeSize = BytesFromDevFmt(srcType);
converter->mDstTypeSize = BytesFromDevFmt(dstType);
converter->mSrcPrepCount = 0;
converter->mFracOffset = 0;
/* Have to set the mixer FPU mode since that's what the resampler code expects. */
FPUCtl mixer_mode{};
/* Fixed-point rate ratio, rounded to nearest and capped at MaxPitch. */
auto step = static_cast<uint>(
mind(srcRate*double{MixerFracOne}/dstRate + 0.5, MaxPitch*MixerFracOne));
converter->mIncrement = maxu(step, 1);
if(converter->mIncrement == MixerFracOne)
converter->mResample = Resample_<CopyTag,CTag>;
else
converter->mResample = PrepareResampler(resampler, converter->mIncrement,
&converter->mState);
return converter;
}
/* Returns how many destination frames convert() could produce given
 * `srcframes` more input frames, without changing the converter state.
 * Mirrors the fixed-point bookkeeping in convert().
 */
uint SampleConverter::availableOut(uint srcframes) const
{
int prepcount{mSrcPrepCount};
if(prepcount < 0)
{
/* Negative prepcount means we need to skip that many input samples. */
if(static_cast<uint>(-prepcount) >= srcframes)
return 0;
srcframes -= static_cast<uint>(-prepcount);
prepcount = 0;
}
if(srcframes < 1)
{
/* No output samples if there's no input samples. */
return 0;
}
if(prepcount < MaxResamplerPadding
&& static_cast<uint>(MaxResamplerPadding - prepcount) >= srcframes)
{
/* Not enough input samples to generate an output sample. */
return 0;
}
/* (prep + src - padding) frames in MixerFracBits fixed point, minus the
 * current fractional offset, divided by the increment (rounding up).
 */
auto DataSize64 = static_cast<uint64_t>(prepcount);
DataSize64 += srcframes;
DataSize64 -= MaxResamplerPadding;
DataSize64 <<= MixerFracBits;
DataSize64 -= mFracOffset;
/* If we have a full prep, we can generate at least one sample. */
return static_cast<uint>(clampu64((DataSize64 + mIncrement-1)/mIncrement, 1,
std::numeric_limits<int>::max()));
}
/* Converts up to `dstframes` frames from *src into dst, resampling and
 * retyping as configured. Advances *src and decrements *srcframes by the
 * input actually consumed (leftover input is buffered in mChan[].PrevSamples
 * for the next call). Returns the number of destination frames written.
 */
uint SampleConverter::convert(const void **src, uint *srcframes, void *dst, uint dstframes)
{
const uint SrcFrameSize{static_cast<uint>(mChan.size()) * mSrcTypeSize};
const uint DstFrameSize{static_cast<uint>(mChan.size()) * mDstTypeSize};
const uint increment{mIncrement};
auto SamplesIn = static_cast<const al::byte*>(*src);
uint NumSrcSamples{*srcframes};
/* Resampler code expects the mixer FPU mode (see CreateSampleConverter). */
FPUCtl mixer_mode{};
uint pos{0};
while(pos < dstframes && NumSrcSamples > 0)
{
int prepcount{mSrcPrepCount};
if(prepcount < 0)
{
/* Negative prepcount means we need to skip that many input samples. */
if(static_cast<uint>(-prepcount) >= NumSrcSamples)
{
mSrcPrepCount = static_cast<int>(NumSrcSamples) + prepcount;
NumSrcSamples = 0;
break;
}
SamplesIn += SrcFrameSize*static_cast<uint>(-prepcount);
NumSrcSamples -= static_cast<uint>(-prepcount);
mSrcPrepCount = 0;
continue;
}
const uint toread{minu(NumSrcSamples, BufferLineSize - MaxResamplerPadding)};
if(prepcount < MaxResamplerPadding
&& static_cast<uint>(MaxResamplerPadding - prepcount) >= toread)
{
/* Not enough input samples to generate an output sample. Store
 * what we're given for later.
 */
for(size_t chan{0u};chan < mChan.size();chan++)
LoadSamples(&mChan[chan].PrevSamples[prepcount], SamplesIn + mSrcTypeSize*chan,
mChan.size(), mSrcType, toread);
mSrcPrepCount = prepcount + static_cast<int>(toread);
NumSrcSamples = 0;
break;
}
float *RESTRICT SrcData{mSrcSamples};
float *RESTRICT DstData{mDstSamples};
uint DataPosFrac{mFracOffset};
/* Output frames producible from prep + new input, in fixed point (same
 * formula as availableOut).
 */
auto DataSize64 = static_cast<uint64_t>(prepcount);
DataSize64 += toread;
DataSize64 -= MaxResamplerPadding;
DataSize64 <<= MixerFracBits;
DataSize64 -= DataPosFrac;
/* If we have a full prep, we can generate at least one sample. */
auto DstSize = static_cast<uint>(
clampu64((DataSize64 + increment-1)/increment, 1, BufferLineSize));
DstSize = minu(DstSize, dstframes-pos);
for(size_t chan{0u};chan < mChan.size();chan++)
{
const al::byte *SrcSamples{SamplesIn + mSrcTypeSize*chan};
al::byte *DstSamples = static_cast<al::byte*>(dst) + mDstTypeSize*chan;
/* Load the previous samples into the source data first, then the
 * new samples from the input buffer.
 */
std::copy_n(mChan[chan].PrevSamples, prepcount, SrcData);
LoadSamples(SrcData + prepcount, SrcSamples, mChan.size(), mSrcType, toread);
/* Store as many prep samples for next time as possible, given the
 * number of output samples being generated.
 */
uint SrcDataEnd{(DstSize*increment + DataPosFrac)>>MixerFracBits};
if(SrcDataEnd >= static_cast<uint>(prepcount)+toread)
std::fill(std::begin(mChan[chan].PrevSamples),
std::end(mChan[chan].PrevSamples), 0.0f);
else
{
const size_t len{minz(al::size(mChan[chan].PrevSamples),
static_cast<uint>(prepcount)+toread-SrcDataEnd)};
std::copy_n(SrcData+SrcDataEnd, len, mChan[chan].PrevSamples);
std::fill(std::begin(mChan[chan].PrevSamples)+len,
std::end(mChan[chan].PrevSamples), 0.0f);
}
/* Now resample, and store the result in the output buffer. */
const float *ResampledData{mResample(&mState, SrcData+(MaxResamplerPadding>>1),
DataPosFrac, increment, {DstData, DstSize})};
StoreSamples(DstSamples, ResampledData, mChan.size(), mDstType, DstSize);
}
/* Update the number of prep samples still available, as well as the
 * fractional offset.
 */
DataPosFrac += increment*DstSize;
mSrcPrepCount = mini(prepcount + static_cast<int>(toread - (DataPosFrac>>MixerFracBits)),
MaxResamplerPadding);
mFracOffset = DataPosFrac & MixerFracMask;
/* Update the src and dst pointers in case there's still more to do. */
SamplesIn += SrcFrameSize*(DataPosFrac>>MixerFracBits);
NumSrcSamples -= minu(NumSrcSamples, (DataPosFrac>>MixerFracBits));
dst = static_cast<al::byte*>(dst) + DstFrameSize*DstSize;
pos += DstSize;
}
/* Report back how much input remains unconsumed. */
*src = SamplesIn;
*srcframes = NumSrcSamples;
return pos;
}
/* Converts `frames` frames from src to float in dst, remapping channels:
 * any masked set of channels can be downmixed to mono (scaled by
 * 1/sqrt(channel count)); a single channel can be upmixed to stereo.
 */
void ChannelConverter::convert(const void *src, float *dst, uint frames) const
{
if(mDstChans == DevFmtMono)
{
/* Equal-power normalization over the number of contributing channels. */
const float scale{std::sqrt(1.0f / static_cast<float>(al::popcount(mChanMask)))};
switch(mSrcType)
{
#define HANDLE_FMT(T) case T: Multi2Mono<T>(mChanMask, mSrcStep, scale, dst, src, frames); break
HANDLE_FMT(DevFmtByte);
HANDLE_FMT(DevFmtUByte);
HANDLE_FMT(DevFmtShort);
HANDLE_FMT(DevFmtUShort);
HANDLE_FMT(DevFmtInt);
HANDLE_FMT(DevFmtUInt);
HANDLE_FMT(DevFmtFloat);
#undef HANDLE_FMT
}
}
else if(mChanMask == 0x1 && mDstChans == DevFmtStereo)
{
switch(mSrcType)
{
#define HANDLE_FMT(T) case T: Mono2Stereo<T>(dst, src, frames); break
HANDLE_FMT(DevFmtByte);
HANDLE_FMT(DevFmtUByte);
HANDLE_FMT(DevFmtShort);
HANDLE_FMT(DevFmtUShort);
HANDLE_FMT(DevFmtInt);
HANDLE_FMT(DevFmtUInt);
HANDLE_FMT(DevFmtFloat);
#undef HANDLE_FMT
}
}
}

View file

@ -1,55 +1,59 @@
#ifndef CONVERTER_H
#define CONVERTER_H
#include "alMain.h"
#include "alu.h"
#include <cstddef>
#include <memory>
#ifdef __cpluspluc
extern "C" {
#endif
#include "almalloc.h"
#include "core/devformat.h"
#include "core/mixer/defs.h"
typedef struct SampleConverter {
enum DevFmtType mSrcType;
enum DevFmtType mDstType;
ALsizei mNumChannels;
ALsizei mSrcTypeSize;
ALsizei mDstTypeSize;
ALint mSrcPrepCount;
ALsizei mFracOffset;
ALsizei mIncrement;
InterpState mState;
ResamplerFunc mResample;
alignas(16) ALfloat mSrcSamples[BUFFERSIZE];
alignas(16) ALfloat mDstSamples[BUFFERSIZE];
struct {
alignas(16) ALfloat mPrevSamples[MAX_RESAMPLE_PADDING*2];
} Chan[];
} SampleConverter;
SampleConverter *CreateSampleConverter(enum DevFmtType srcType, enum DevFmtType dstType, ALsizei numchans, ALsizei srcRate, ALsizei dstRate);
void DestroySampleConverter(SampleConverter **converter);
ALsizei SampleConverterInput(SampleConverter *converter, const ALvoid **src, ALsizei *srcframes, ALvoid *dst, ALsizei dstframes);
ALsizei SampleConverterAvailableOut(SampleConverter *converter, ALsizei srcframes);
using uint = unsigned int;
typedef struct ChannelConverter {
enum DevFmtType mSrcType;
enum DevFmtChannels mSrcChans;
enum DevFmtChannels mDstChans;
} ChannelConverter;
struct SampleConverter {
DevFmtType mSrcType{};
DevFmtType mDstType{};
uint mSrcTypeSize{};
uint mDstTypeSize{};
ChannelConverter *CreateChannelConverter(enum DevFmtType srcType, enum DevFmtChannels srcChans, enum DevFmtChannels dstChans);
void DestroyChannelConverter(ChannelConverter **converter);
int mSrcPrepCount{};
void ChannelConverterInput(ChannelConverter *converter, const ALvoid *src, ALfloat *dst, ALsizei frames);
uint mFracOffset{};
uint mIncrement{};
InterpState mState{};
ResamplerFunc mResample{};
#ifdef __cpluspluc
}
#endif
alignas(16) float mSrcSamples[BufferLineSize]{};
alignas(16) float mDstSamples[BufferLineSize]{};
struct ChanSamples {
alignas(16) float PrevSamples[MaxResamplerPadding];
};
al::FlexArray<ChanSamples> mChan;
SampleConverter(size_t numchans) : mChan{numchans} { }
uint convert(const void **src, uint *srcframes, void *dst, uint dstframes);
uint availableOut(uint srcframes) const;
DEF_FAM_NEWDEL(SampleConverter, mChan)
};
using SampleConverterPtr = std::unique_ptr<SampleConverter>;
SampleConverterPtr CreateSampleConverter(DevFmtType srcType, DevFmtType dstType, size_t numchans,
uint srcRate, uint dstRate, Resampler resampler);
/* Converts a masked set of interleaved input channels to a mono or stereo
 * float stream (see ChannelConverter::convert).
 */
struct ChannelConverter {
DevFmtType mSrcType{};
/* Stride (total channels) of the interleaved source frames. */
uint mSrcStep{};
/* Bitmask of source channels that contribute to the output; zero means
 * the converter is unconfigured (see is_active).
 */
uint mChanMask{};
DevFmtChannels mDstChans{};
bool is_active() const noexcept { return mChanMask != 0; }
void convert(const void *src, float *dst, uint frames) const;
};
#endif /* CONVERTER_H */

View file

@ -1,15 +0,0 @@
#ifndef CPU_CAPS_H
#define CPU_CAPS_H
/* Bitmask of detected CPU SIMD capabilities, filled by FillCPUCaps(). */
extern int CPUCapFlags;
enum {
CPU_CAP_SSE = 1<<0,
CPU_CAP_SSE2 = 1<<1,
CPU_CAP_SSE3 = 1<<2,
CPU_CAP_SSE4_1 = 1<<3,
CPU_CAP_NEON = 1<<4,
};
/* Detects CPU capabilities, keeping only those bits present in capfilter. */
void FillCPUCaps(int capfilter);
#endif /* CPU_CAPS_H */

View file

@ -0,0 +1,212 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2018 by Raul Herraiz.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cmath>
#include <cstdlib>
#include <algorithm>
#include "alcmain.h"
#include "alcontext.h"
#include "core/filters/biquad.h"
#include "effectslot.h"
#include "vecmat.h"
namespace {
/* Scale divisor applied to the peak-gain property (see AutowahState::update). */
constexpr float GainScale{31621.0f};
/* Bounds of the wah filter sweep, in Hz, and the filter's fixed Q factor. */
constexpr float MinFreq{20.0f};
constexpr float MaxFreq{2500.0f};
constexpr float QFactor{5.0f};
/* Per-slot state for the autowah effect: an envelope follower driving a
 * per-sample peaking filter, mixed to the output channels.
 */
struct AutowahState final : public EffectState {
    /* Effect parameters */
    float mAttackRate;    /* Envelope attack coefficient, per sample. */
    float mReleaseRate;   /* Envelope release coefficient, per sample. */
    float mResonanceGain; /* Gain of the filter's resonance peak. */
    float mPeakGain;      /* Scales the input when feeding the envelope. */
    float mFreqMinNorm;   /* Minimum sweep frequency, normalized to the sample rate. */
    float mBandwidthNorm; /* Sweep bandwidth, normalized to the sample rate. */
    float mEnvDelay;      /* Envelope follower state, carried across process() calls. */
    /* Filter components derived from the envelope. */
    struct {
        float cos_w0;
        float alpha;
    } mEnv[BufferLineSize];
    struct {
        /* Effect filters' history. */
        struct {
            float z1, z2;
        } Filter;
        /* Effect gains for each output channel */
        float CurrentGains[MAX_OUTPUT_CHANNELS];
        float TargetGains[MAX_OUTPUT_CHANNELS];
    } mChans[MaxAmbiChannels];
    /* Effects buffers */
    alignas(16) float mBufferOut[BufferLineSize];
    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;
    DEF_NEWDEL(AutowahState)
};
/* Resets the effect parameters to their defaults and flushes the envelope,
 * filter, and gain history for a new device configuration.
 */
void AutowahState::deviceUpdate(const ALCdevice*, const Buffer&)
{
    mAttackRate    = 1.0f;
    mReleaseRate   = 1.0f;
    mResonanceGain = 10.0f;
    mPeakGain      = 4.5f;
    mFreqMinNorm   = 4.5e-4f;
    mBandwidthNorm = 0.05f;
    mEnvDelay      = 0.0f;
    /* Clear the per-sample filter coefficients. */
    std::for_each(std::begin(mEnv), std::end(mEnv),
        [](auto &env) { env.cos_w0 = env.alpha = 0.0f; });
    /* Flush each channel's filter history and current mixing gains. The
     * target gains are set on the next update() call.
     */
    for(auto &chan : mChans)
    {
        chan.Filter.z1 = chan.Filter.z2 = 0.0f;
        for(float &gain : chan.CurrentGains)
            gain = 0.0f;
    }
}
/* Updates the effect parameters from the slot's autowah properties. */
void AutowahState::update(const ALCcontext *context, const EffectSlot *slot,
    const EffectProps *props, const EffectTarget target)
{
    const ALCdevice *device{context->mDevice.get()};
    const auto frequency = static_cast<float>(device->Frequency);
    const float ReleaseTime{clampf(props->Autowah.ReleaseTime, 0.001f, 1.0f)};
    /* One-pole smoothing coefficients for the given attack/release times. */
    mAttackRate = std::exp(-1.0f / (props->Autowah.AttackTime*frequency));
    mReleaseRate = std::exp(-1.0f / (ReleaseTime*frequency));
    /* 0-20dB Resonance Peak gain */
    mResonanceGain = std::sqrt(std::log10(props->Autowah.Resonance)*10.0f / 3.0f);
    mPeakGain = 1.0f - std::log10(props->Autowah.PeakGain / GainScale);
    /* Normalize the sweep bounds to the device sample rate. */
    mFreqMinNorm = MinFreq / frequency;
    mBandwidthNorm = (MaxFreq-MinFreq) / frequency;
    mOutTarget = target.Main->Buffer;
    /* Set identity (pass-through) panning per ambisonic channel. */
    auto set_gains = [slot,target](auto &chan, al::span<const float,MaxAmbiChannels> coeffs)
    { ComputePanGains(target.Main, coeffs.data(), slot->Gain, chan.TargetGains); };
    SetAmbiPanIdentity(std::begin(mChans), slot->Wet.Buffer.size(), set_gains);
}
/* Renders samplesToDo samples: follows the first input channel's envelope,
 * derives a per-sample peaking filter from it, filters every input channel
 * through that filter, and mixes the results to the output.
 */
void AutowahState::process(const size_t samplesToDo,
    const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    const float attack_rate{mAttackRate};
    const float release_rate{mReleaseRate};
    const float res_gain{mResonanceGain};
    const float peak_gain{mPeakGain};
    const float freq_min{mFreqMinNorm};
    const float bandwidth{mBandwidthNorm};
    float env_delay{mEnvDelay};
    for(size_t i{0u};i < samplesToDo;i++)
    {
        float w0, sample, a;
        /* Envelope follower described on the book: Audio Effects, Theory,
         * Implementation and Application.
         */
        sample = peak_gain * std::fabs(samplesIn[0][i]);
        a = (sample > env_delay) ? attack_rate : release_rate;
        env_delay = lerp(sample, env_delay, a);
        /* Calculate the cos and alpha components for this sample's filter. */
        w0 = minf((bandwidth*env_delay + freq_min), 0.46f) * al::MathDefs<float>::Tau();
        mEnv[i].cos_w0 = std::cos(w0);
        mEnv[i].alpha = std::sin(w0)/(2.0f * QFactor);
    }
    mEnvDelay = env_delay;
    auto chandata = std::addressof(mChans[0]);
    for(const auto &insamples : samplesIn)
    {
        /* This effectively inlines BiquadFilter_setParams for a peaking
         * filter and BiquadFilter_processC. The alpha and cosine components
         * for the filter coefficients were previously calculated with the
         * envelope. Because the filter changes for each sample, the
         * coefficients are transient and don't need to be held.
         */
        float z1{chandata->Filter.z1};
        float z2{chandata->Filter.z2};
        for(size_t i{0u};i < samplesToDo;i++)
        {
            const float alpha{mEnv[i].alpha};
            const float cos_w0{mEnv[i].cos_w0};
            float input, output;
            float a[3], b[3];
            b[0] = 1.0f + alpha*res_gain;
            b[1] = -2.0f * cos_w0;
            b[2] = 1.0f - alpha*res_gain;
            a[0] = 1.0f + alpha/res_gain;
            a[1] = -2.0f * cos_w0;
            a[2] = 1.0f - alpha/res_gain;
            input = insamples[i];
            /* Transposed direct form II biquad step, with z1/z2 as the
             * carried filter history.
             */
            output = input*(b[0]/a[0]) + z1;
            z1 = input*(b[1]/a[0]) - output*(a[1]/a[0]) + z2;
            z2 = input*(b[2]/a[0]) - output*(a[2]/a[0]);
            mBufferOut[i] = output;
        }
        chandata->Filter.z1 = z1;
        chandata->Filter.z2 = z2;
        /* Now, mix the processed sound data to the output. */
        MixSamples({mBufferOut, samplesToDo}, samplesOut, chandata->CurrentGains,
            chandata->TargetGains, samplesToDo, 0);
        ++chandata;
    }
}
/* Creates AutowahState instances for the autowah effect type. */
struct AutowahStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    { return al::intrusive_ptr<EffectState>{new AutowahState{}}; }
};
} // namespace
/* Returns the singleton factory used to create autowah effect states. The
 * pointer refers to a function-local static and is never null.
 */
EffectStateFactory *AutowahStateFactory_getFactory()
{
    static AutowahStateFactory the_factory{};
    return &the_factory;
}

View file

@ -0,0 +1,212 @@
#ifndef EFFECTS_BASE_H
#define EFFECTS_BASE_H
#include <cstddef>
#include "albyte.h"
#include "alcmain.h"
#include "almalloc.h"
#include "alspan.h"
#include "atomic.h"
#include "intrusive_ptr.h"
struct EffectSlot;
struct BufferStorage;
/* Chorus/flanger LFO shapes. */
enum class ChorusWaveform {
    Sinusoid,
    Triangle
};
/* Echo delay limits; presumably in seconds (matching the EFX AL_ECHO_MAX_*
 * property limits) -- confirm against the echo effect implementation.
 */
constexpr float EchoMaxDelay{0.207f};
constexpr float EchoMaxLRDelay{0.404f};
/* Per-channel frequency shifter direction. */
enum class FShifterDirection {
    Down,
    Up,
    Off
};
/* Ring modulator carrier shapes. */
enum class ModulatorWaveform {
    Sinusoid,
    Sawtooth,
    Square
};
/* Vocal morpher phonemes: vowels first, then consonants. */
enum class VMorpherPhenome {
    A, E, I, O, U,
    AA, AE, AH, AO, EH, ER, IH, IY, UH, UW,
    B, D, F, G, J, K, L, M, N, P, R, S, T, V, Z
};
/* Vocal morpher LFO shapes. */
enum class VMorpherWaveform {
    Sinusoid,
    Triangle,
    Sawtooth
};
/* Property storage for every effect type. Only the member matching the
 * effect type in use is meaningful at any given time.
 */
union EffectProps {
    struct {
        // Shared Reverb Properties
        float Density;
        float Diffusion;
        float Gain;
        float GainHF;
        float DecayTime;
        float DecayHFRatio;
        float ReflectionsGain;
        float ReflectionsDelay;
        float LateReverbGain;
        float LateReverbDelay;
        float AirAbsorptionGainHF;
        float RoomRolloffFactor;
        bool DecayHFLimit;
        // Additional EAX Reverb Properties
        float GainLF;
        float DecayLFRatio;
        float ReflectionsPan[3];
        float LateReverbPan[3];
        float EchoTime;
        float EchoDepth;
        float ModulationTime;
        float ModulationDepth;
        float HFReference;
        float LFReference;
    } Reverb;
    struct {
        float AttackTime;
        float ReleaseTime;
        float Resonance;
        float PeakGain;
    } Autowah;
    struct {
        ChorusWaveform Waveform;
        int Phase;
        float Rate;
        float Depth;
        float Feedback;
        float Delay;
    } Chorus; /* Also Flanger */
    struct {
        bool OnOff;
    } Compressor;
    struct {
        float Edge;
        float Gain;
        float LowpassCutoff;
        float EQCenter;
        float EQBandwidth;
    } Distortion;
    struct {
        float Delay;
        float LRDelay;
        float Damping;
        float Feedback;
        float Spread;
    } Echo;
    struct {
        float LowCutoff;
        float LowGain;
        float Mid1Center;
        float Mid1Gain;
        float Mid1Width;
        float Mid2Center;
        float Mid2Gain;
        float Mid2Width;
        float HighCutoff;
        float HighGain;
    } Equalizer;
    struct {
        float Frequency;
        FShifterDirection LeftDirection;
        FShifterDirection RightDirection;
    } Fshifter;
    struct {
        float Frequency;
        float HighPassCutoff;
        ModulatorWaveform Waveform;
    } Modulator;
    struct {
        int CoarseTune;
        int FineTune;
    } Pshifter;
    struct {
        float Rate;
        VMorpherPhenome PhonemeA;
        VMorpherPhenome PhonemeB;
        int PhonemeACoarseTuning;
        int PhonemeBCoarseTuning;
        VMorpherWaveform Waveform;
    } Vmorpher;
    struct {
        float Gain;
    } Dedicated;
};
/* Mixing destinations handed to an effect: the main (ambisonic) mix and the
 * device's real output channels.
 */
struct EffectTarget {
    MixParams *Main;
    RealMixParams *RealOut;
};
/* Base class for a per-slot effect processor instance. */
struct EffectState : public al::intrusive_ref<EffectState> {
    struct Buffer {
        const BufferStorage *storage;
        al::span<const al::byte> samples;
    };
    /* Buffer lines that process() mixes its output into. */
    al::span<FloatBufferLine> mOutTarget;
    virtual ~EffectState() = default;
    /* (Re)initializes internal state for the given device configuration. */
    virtual void deviceUpdate(const ALCdevice *device, const Buffer &buffer) = 0;
    /* Applies the slot's current effect properties. */
    virtual void update(const ALCcontext *context, const EffectSlot *slot,
        const EffectProps *props, const EffectTarget target) = 0;
    /* Renders samplesToDo frames from samplesIn to samplesOut. */
    virtual void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) = 0;
};
/* Creates EffectState instances for one effect type. */
struct EffectStateFactory {
    virtual ~EffectStateFactory() = default;
    virtual al::intrusive_ptr<EffectState> create() = 0;
};
/* Accessors for the per-effect-type state factories. Each returns a pointer
 * to that effect's factory (see e.g. AutowahStateFactory_getFactory).
 */
EffectStateFactory *NullStateFactory_getFactory(void);
EffectStateFactory *ReverbStateFactory_getFactory(void);
EffectStateFactory *StdReverbStateFactory_getFactory(void);
EffectStateFactory *AutowahStateFactory_getFactory(void);
EffectStateFactory *ChorusStateFactory_getFactory(void);
EffectStateFactory *CompressorStateFactory_getFactory(void);
EffectStateFactory *DistortionStateFactory_getFactory(void);
EffectStateFactory *EchoStateFactory_getFactory(void);
EffectStateFactory *EqualizerStateFactory_getFactory(void);
EffectStateFactory *FlangerStateFactory_getFactory(void);
EffectStateFactory *FshifterStateFactory_getFactory(void);
EffectStateFactory *ModulatorStateFactory_getFactory(void);
EffectStateFactory *PshifterStateFactory_getFactory(void);
/* Pointer declarator moved to match the style of the sibling declarations. */
EffectStateFactory *VmorpherStateFactory_getFactory(void);
EffectStateFactory *DedicatedStateFactory_getFactory(void);
EffectStateFactory *ConvolutionStateFactory_getFactory(void);
#endif /* EFFECTS_BASE_H */

View file

@ -1,555 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Mike Gorchak
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <math.h>
#include <stdlib.h>
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
#include "alu.h"
#include "filters/defs.h"
/* Chorus and flanger share waveform handling; ensure the enum values agree. */
static_assert(AL_CHORUS_WAVEFORM_SINUSOID == AL_FLANGER_WAVEFORM_SINUSOID, "Chorus/Flanger waveform value mismatch");
static_assert(AL_CHORUS_WAVEFORM_TRIANGLE == AL_FLANGER_WAVEFORM_TRIANGLE, "Chorus/Flanger waveform value mismatch");
enum WaveForm {
    WF_Sinusoid,
    WF_Triangle
};
/* Runtime state for the chorus (and flanger) effect. */
typedef struct ALchorusState {
    DERIVE_FROM_TYPE(ALeffectState);
    ALfloat *SampleBuffer; /* Power-of-two sized delay line (see deviceUpdate). */
    ALsizei BufferLength;
    ALsizei offset;        /* Write position in the delay line. */
    ALsizei lfo_offset;    /* Current LFO phase, in samples. */
    ALsizei lfo_range;     /* Samples per LFO cycle. */
    ALfloat lfo_scale;     /* Phase-to-waveform scale for the chosen shape. */
    ALint lfo_disp;        /* Phase displacement between the two taps. */
    /* Gains for left and right sides */
    struct {
        ALfloat Current[MAX_OUTPUT_CHANNELS];
        ALfloat Target[MAX_OUTPUT_CHANNELS];
    } Gains[2];
    /* effect parameters */
    enum WaveForm waveform;
    ALint delay;           /* Base tap delay, fixed-point with FRACTIONBITS. */
    ALfloat depth;
    ALfloat feedback;
} ALchorusState;
static ALvoid ALchorusState_Destruct(ALchorusState *state);
static ALboolean ALchorusState_deviceUpdate(ALchorusState *state, ALCdevice *Device);
static ALvoid ALchorusState_update(ALchorusState *state, const ALCcontext *Context, const ALeffectslot *Slot, const ALeffectProps *props);
static ALvoid ALchorusState_process(ALchorusState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALchorusState)
DEFINE_ALEFFECTSTATE_VTABLE(ALchorusState);
/* Initializes the base state and vtable, and clears the delay-line fields. */
static void ALchorusState_Construct(ALchorusState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALchorusState, ALeffectState, state);
    state->BufferLength = 0;
    state->SampleBuffer = NULL;
    state->offset = 0;
    state->lfo_offset = 0;
    state->lfo_range = 1;
    state->waveform = WF_Triangle;
}
/* Frees the delay line and destructs the base effect state. */
static ALvoid ALchorusState_Destruct(ALchorusState *state)
{
    al_free(state->SampleBuffer);
    state->SampleBuffer = NULL;
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
/* (Re)allocates and clears the delay line for the device's sample rate.
 * Returns AL_FALSE on allocation failure.
 */
static ALboolean ALchorusState_deviceUpdate(ALchorusState *state, ALCdevice *Device)
{
    const ALfloat max_delay = maxf(AL_CHORUS_MAX_DELAY, AL_FLANGER_MAX_DELAY);
    ALsizei maxlen;
    /* The buffer is power-of-two sized so taps can wrap with a simple mask. */
    maxlen = NextPowerOf2(float2int(max_delay*2.0f*Device->Frequency) + 1u);
    if(maxlen <= 0) return AL_FALSE;
    if(maxlen != state->BufferLength)
    {
        void *temp = al_calloc(16, maxlen * sizeof(ALfloat));
        if(!temp) return AL_FALSE;
        al_free(state->SampleBuffer);
        state->SampleBuffer = temp;
        state->BufferLength = maxlen;
    }
    memset(state->SampleBuffer, 0, state->BufferLength*sizeof(ALfloat));
    memset(state->Gains, 0, sizeof(state->Gains));
    return AL_TRUE;
}
/* Updates the chorus parameters from the effect slot's properties. */
static ALvoid ALchorusState_update(ALchorusState *state, const ALCcontext *Context, const ALeffectslot *Slot, const ALeffectProps *props)
{
    const ALsizei mindelay = MAX_RESAMPLE_PADDING << FRACTIONBITS;
    const ALCdevice *device = Context->Device;
    ALfloat frequency = (ALfloat)device->Frequency;
    ALfloat coeffs[MAX_AMBI_COEFFS];
    ALfloat rate;
    ALint phase;
    switch(props->Chorus.Waveform)
    {
        case AL_CHORUS_WAVEFORM_TRIANGLE:
            state->waveform = WF_Triangle;
            break;
        case AL_CHORUS_WAVEFORM_SINUSOID:
            state->waveform = WF_Sinusoid;
            break;
    }
    /* The LFO depth is scaled to be relative to the sample delay. Clamp the
     * delay and depth to allow enough padding for resampling.
     */
    state->delay = maxi(float2int(props->Chorus.Delay*frequency*FRACTIONONE + 0.5f),
                        mindelay);
    state->depth = minf(props->Chorus.Depth * state->delay,
                        (ALfloat)(state->delay - mindelay));
    state->feedback = props->Chorus.Feedback;
    /* Gains for left and right sides */
    CalcAngleCoeffs(-F_PI_2, 0.0f, 0.0f, coeffs);
    ComputeDryPanGains(&device->Dry, coeffs, Slot->Params.Gain, state->Gains[0].Target);
    CalcAngleCoeffs( F_PI_2, 0.0f, 0.0f, coeffs);
    ComputeDryPanGains(&device->Dry, coeffs, Slot->Params.Gain, state->Gains[1].Target);
    phase = props->Chorus.Phase;
    rate = props->Chorus.Rate;
    if(!(rate > 0.0f))
    {
        /* A non-positive rate disables the LFO. */
        state->lfo_offset = 0;
        state->lfo_range = 1;
        state->lfo_scale = 0.0f;
        state->lfo_disp = 0;
    }
    else
    {
        /* Calculate LFO coefficient (number of samples per cycle). Limit the
         * max range to avoid overflow when calculating the displacement.
         */
        ALsizei lfo_range = float2int(minf(frequency/rate + 0.5f, (ALfloat)(INT_MAX/360 - 180)));
        /* Rescale the current phase to the new range to keep it continuous. */
        state->lfo_offset = float2int((ALfloat)state->lfo_offset/state->lfo_range*
                                      lfo_range + 0.5f) % lfo_range;
        state->lfo_range = lfo_range;
        switch(state->waveform)
        {
            case WF_Triangle:
                state->lfo_scale = 4.0f / state->lfo_range;
                break;
            case WF_Sinusoid:
                state->lfo_scale = F_TAU / state->lfo_range;
                break;
        }
        /* Calculate lfo phase displacement */
        if(phase < 0) phase = 360 + phase;
        state->lfo_disp = (state->lfo_range*phase + 180) / 360;
    }
}
/* Fills 'delays' with 'todo' fixed-point delay values following a triangle
 * LFO that starts at the given phase offset.
 */
static void GetTriangleDelays(ALint *restrict delays, ALsizei offset, const ALsizei lfo_range,
                              const ALfloat lfo_scale, const ALfloat depth, const ALsizei delay,
                              const ALsizei todo)
{
    ALsizei i = 0;
    while(i < todo)
    {
        /* The scaled phase sweeps 0..4, folded into a -1..+1 triangle. */
        const ALfloat lfo = 1.0f - fabsf(2.0f - lfo_scale*offset);
        delays[i++] = fastf2i(lfo * depth) + delay;
        offset = (offset+1)%lfo_range;
    }
}
/* Fills 'delays' with 'todo' fixed-point delay values following a sinusoid
 * LFO that starts at the given phase offset.
 */
static void GetSinusoidDelays(ALint *restrict delays, ALsizei offset, const ALsizei lfo_range,
                              const ALfloat lfo_scale, const ALfloat depth, const ALsizei delay,
                              const ALsizei todo)
{
    ALsizei i = 0;
    while(i < todo)
    {
        delays[i++] = fastf2i(sinf(lfo_scale*offset) * depth) + delay;
        offset = (offset+1)%lfo_range;
    }
}
/* Renders the chorus: writes input into the delay line, taps it at two
 * LFO-modulated delays (left and right), accumulates feedback, and mixes
 * both taps to the output channels.
 */
static ALvoid ALchorusState_process(ALchorusState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    const ALsizei bufmask = state->BufferLength-1;
    const ALfloat feedback = state->feedback;
    const ALsizei avgdelay = (state->delay + (FRACTIONONE>>1)) >> FRACTIONBITS;
    ALfloat *restrict delaybuf = state->SampleBuffer;
    ALsizei offset = state->offset;
    ALsizei i, c;
    ALsizei base;
    /* Process in chunks of at most 256 samples. */
    for(base = 0;base < SamplesToDo;)
    {
        const ALsizei todo = mini(256, SamplesToDo-base);
        ALint moddelays[2][256];
        alignas(16) ALfloat temps[2][256];
        if(state->waveform == WF_Sinusoid)
        {
            GetSinusoidDelays(moddelays[0], state->lfo_offset, state->lfo_range, state->lfo_scale,
                              state->depth, state->delay, todo);
            GetSinusoidDelays(moddelays[1], (state->lfo_offset+state->lfo_disp)%state->lfo_range,
                              state->lfo_range, state->lfo_scale, state->depth, state->delay,
                              todo);
        }
        else /*if(state->waveform == WF_Triangle)*/
        {
            GetTriangleDelays(moddelays[0], state->lfo_offset, state->lfo_range, state->lfo_scale,
                              state->depth, state->delay, todo);
            GetTriangleDelays(moddelays[1], (state->lfo_offset+state->lfo_disp)%state->lfo_range,
                              state->lfo_range, state->lfo_scale, state->depth, state->delay,
                              todo);
        }
        state->lfo_offset = (state->lfo_offset+todo) % state->lfo_range;
        for(i = 0;i < todo;i++)
        {
            ALint delay;
            ALfloat mu;
            // Feed the buffer's input first (necessary for delays < 1).
            delaybuf[offset&bufmask] = SamplesIn[0][base+i];
            // Tap for the left output.
            delay = offset - (moddelays[0][i]>>FRACTIONBITS);
            mu = (moddelays[0][i]&FRACTIONMASK) * (1.0f/FRACTIONONE);
            temps[0][i] = cubic(delaybuf[(delay+1) & bufmask], delaybuf[(delay  ) & bufmask],
                                delaybuf[(delay-1) & bufmask], delaybuf[(delay-2) & bufmask],
                                mu);
            // Tap for the right output.
            delay = offset - (moddelays[1][i]>>FRACTIONBITS);
            mu = (moddelays[1][i]&FRACTIONMASK) * (1.0f/FRACTIONONE);
            temps[1][i] = cubic(delaybuf[(delay+1) & bufmask], delaybuf[(delay  ) & bufmask],
                                delaybuf[(delay-1) & bufmask], delaybuf[(delay-2) & bufmask],
                                mu);
            // Accumulate feedback from the average delay of the taps.
            delaybuf[offset&bufmask] += delaybuf[(offset-avgdelay) & bufmask] * feedback;
            offset++;
        }
        for(c = 0;c < 2;c++)
            MixSamples(temps[c], NumChannels, SamplesOut, state->Gains[c].Current,
                       state->Gains[c].Target, SamplesToDo-base, base, todo);
        base += todo;
    }
    state->offset = offset;
}
typedef struct ChorusStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} ChorusStateFactory;
/* Allocates and constructs a new chorus effect state, or NULL on failure. */
static ALeffectState *ChorusStateFactory_create(ChorusStateFactory *UNUSED(factory))
{
    ALchorusState *state;
    NEW_OBJ0(state, ALchorusState)();
    if(!state) return NULL;
    return STATIC_CAST(ALeffectState, state);
}
DEFINE_EFFECTSTATEFACTORY_VTABLE(ChorusStateFactory);
/* Returns the singleton chorus state factory. */
EffectStateFactory *ChorusStateFactory_getFactory(void)
{
    static ChorusStateFactory ChorusFactory = { { GET_VTABLE2(ChorusStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &ChorusFactory);
}
/* Sets an integer chorus property, validating the value's range. */
void ALchorus_setParami(ALeffect *effect, ALCcontext *context, ALenum param, ALint val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_CHORUS_WAVEFORM:
            if(!(val >= AL_CHORUS_MIN_WAVEFORM && val <= AL_CHORUS_MAX_WAVEFORM))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Invalid chorus waveform");
            props->Chorus.Waveform = val;
            break;
        case AL_CHORUS_PHASE:
            if(!(val >= AL_CHORUS_MIN_PHASE && val <= AL_CHORUS_MAX_PHASE))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Chorus phase out of range");
            props->Chorus.Phase = val;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid chorus integer property 0x%04x", param);
    }
}
/* Integer-vector properties forward to the single-value setter. */
void ALchorus_setParamiv(ALeffect *effect, ALCcontext *context, ALenum param, const ALint *vals)
{ ALchorus_setParami(effect, context, param, vals[0]); }
/* Sets a float chorus property, validating the value's range. */
void ALchorus_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_CHORUS_RATE:
            if(!(val >= AL_CHORUS_MIN_RATE && val <= AL_CHORUS_MAX_RATE))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Chorus rate out of range");
            props->Chorus.Rate = val;
            break;
        case AL_CHORUS_DEPTH:
            if(!(val >= AL_CHORUS_MIN_DEPTH && val <= AL_CHORUS_MAX_DEPTH))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Chorus depth out of range");
            props->Chorus.Depth = val;
            break;
        case AL_CHORUS_FEEDBACK:
            if(!(val >= AL_CHORUS_MIN_FEEDBACK && val <= AL_CHORUS_MAX_FEEDBACK))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Chorus feedback out of range");
            props->Chorus.Feedback = val;
            break;
        case AL_CHORUS_DELAY:
            if(!(val >= AL_CHORUS_MIN_DELAY && val <= AL_CHORUS_MAX_DELAY))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Chorus delay out of range");
            props->Chorus.Delay = val;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid chorus float property 0x%04x", param);
    }
}
/* Float-vector properties forward to the single-value setter. */
void ALchorus_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALchorus_setParamf(effect, context, param, vals[0]); }
/* Reads an integer chorus property into *val. */
void ALchorus_getParami(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_CHORUS_WAVEFORM:
            *val = props->Chorus.Waveform;
            break;
        case AL_CHORUS_PHASE:
            *val = props->Chorus.Phase;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid chorus integer property 0x%04x", param);
    }
}
/* Integer-vector queries forward to the single-value getter. */
void ALchorus_getParamiv(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *vals)
{ ALchorus_getParami(effect, context, param, vals); }
/* Reads a float chorus property into *val. */
void ALchorus_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_CHORUS_RATE:
            *val = props->Chorus.Rate;
            break;
        case AL_CHORUS_DEPTH:
            *val = props->Chorus.Depth;
            break;
        case AL_CHORUS_FEEDBACK:
            *val = props->Chorus.Feedback;
            break;
        case AL_CHORUS_DELAY:
            *val = props->Chorus.Delay;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid chorus float property 0x%04x", param);
    }
}
/* Float-vector queries forward to the single-value getter. */
void ALchorus_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALchorus_getParamf(effect, context, param, vals); }
DEFINE_ALEFFECT_VTABLE(ALchorus);
/* Flanger is basically a chorus with a really short delay. They can both use
* the same processing functions, so piggyback flanger on the chorus functions.
*/
typedef struct FlangerStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} FlangerStateFactory;
/* Allocates and constructs a new flanger (chorus) state, or NULL on failure. */
ALeffectState *FlangerStateFactory_create(FlangerStateFactory *UNUSED(factory))
{
    ALchorusState *state;
    NEW_OBJ0(state, ALchorusState)();
    if(!state) return NULL;
    return STATIC_CAST(ALeffectState, state);
}
DEFINE_EFFECTSTATEFACTORY_VTABLE(FlangerStateFactory);
/* Returns the singleton flanger state factory. */
EffectStateFactory *FlangerStateFactory_getFactory(void)
{
    static FlangerStateFactory FlangerFactory = { { GET_VTABLE2(FlangerStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &FlangerFactory);
}
/* Sets an integer flanger property, validating the value's range. The
 * flanger shares the Chorus property storage.
 */
void ALflanger_setParami(ALeffect *effect, ALCcontext *context, ALenum param, ALint val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_FLANGER_WAVEFORM:
            if(!(val >= AL_FLANGER_MIN_WAVEFORM && val <= AL_FLANGER_MAX_WAVEFORM))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Invalid flanger waveform");
            props->Chorus.Waveform = val;
            break;
        case AL_FLANGER_PHASE:
            if(!(val >= AL_FLANGER_MIN_PHASE && val <= AL_FLANGER_MAX_PHASE))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Flanger phase out of range");
            props->Chorus.Phase = val;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid flanger integer property 0x%04x", param);
    }
}
/* Integer-vector properties forward to the single-value setter. */
void ALflanger_setParamiv(ALeffect *effect, ALCcontext *context, ALenum param, const ALint *vals)
{ ALflanger_setParami(effect, context, param, vals[0]); }
/* Sets a float flanger property, validating the value's range. */
void ALflanger_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_FLANGER_RATE:
            if(!(val >= AL_FLANGER_MIN_RATE && val <= AL_FLANGER_MAX_RATE))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Flanger rate out of range");
            props->Chorus.Rate = val;
            break;
        case AL_FLANGER_DEPTH:
            if(!(val >= AL_FLANGER_MIN_DEPTH && val <= AL_FLANGER_MAX_DEPTH))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Flanger depth out of range");
            props->Chorus.Depth = val;
            break;
        case AL_FLANGER_FEEDBACK:
            if(!(val >= AL_FLANGER_MIN_FEEDBACK && val <= AL_FLANGER_MAX_FEEDBACK))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Flanger feedback out of range");
            props->Chorus.Feedback = val;
            break;
        case AL_FLANGER_DELAY:
            if(!(val >= AL_FLANGER_MIN_DELAY && val <= AL_FLANGER_MAX_DELAY))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Flanger delay out of range");
            props->Chorus.Delay = val;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid flanger float property 0x%04x", param);
    }
}
/* Float-vector properties forward to the single-value setter. */
void ALflanger_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALflanger_setParamf(effect, context, param, vals[0]); }
/* Reads an integer flanger property into *val. */
void ALflanger_getParami(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_FLANGER_WAVEFORM:
            *val = props->Chorus.Waveform;
            break;
        case AL_FLANGER_PHASE:
            *val = props->Chorus.Phase;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid flanger integer property 0x%04x", param);
    }
}
/* Integer-vector queries forward to the single-value getter. */
void ALflanger_getParamiv(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *vals)
{ ALflanger_getParami(effect, context, param, vals); }
/* Reads a float flanger property into *val. */
void ALflanger_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_FLANGER_RATE:
            *val = props->Chorus.Rate;
            break;
        case AL_FLANGER_DEPTH:
            *val = props->Chorus.Depth;
            break;
        case AL_FLANGER_FEEDBACK:
            *val = props->Chorus.Feedback;
            break;
        case AL_FLANGER_DELAY:
            *val = props->Chorus.Delay;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid flanger float property 0x%04x", param);
    }
}
/* Float-vector queries forward to the single-value getter. */
void ALflanger_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALflanger_getParamf(effect, context, param, vals); }
DEFINE_ALEFFECT_VTABLE(ALflanger);

View file

@ -0,0 +1,287 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Mike Gorchak
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <algorithm>
#include <climits>
#include <cmath>
#include <cstdlib>
#include <iterator>
#include "alcmain.h"
#include "alcontext.h"
#include "almalloc.h"
#include "alnumeric.h"
#include "alspan.h"
#include "alu.h"
#include "core/ambidefs.h"
#include "effects/base.h"
#include "effectslot.h"
#include "math_defs.h"
#include "opthelpers.h"
#include "vector.h"
namespace {
#define MAX_UPDATE_SAMPLES 256
/* Runtime state for the chorus (and flanger) effect. */
struct ChorusState final : public EffectState {
    al::vector<float,16> mSampleBuffer; /* Power-of-two sized delay line (see deviceUpdate). */
    uint mOffset{0};                    /* Write position in the delay line. */
    uint mLfoOffset{0};                 /* Current LFO phase, in samples. */
    uint mLfoRange{1};                  /* Samples per LFO cycle. */
    float mLfoScale{0.0f};              /* Phase-to-waveform scale for the chosen shape. */
    uint mLfoDisp{0};                   /* Phase displacement between the two taps. */
    /* Gains for left and right sides */
    struct {
        float Current[MAX_OUTPUT_CHANNELS]{};
        float Target[MAX_OUTPUT_CHANNELS]{};
    } mGains[2];
    /* effect parameters */
    ChorusWaveform mWaveform{};
    int mDelay{0};          /* Base tap delay, fixed-point with MixerFracBits. */
    float mDepth{0.0f};
    float mFeedback{0.0f};
    /* Fill both taps' per-sample delays and advance the LFO phase. */
    void getTriangleDelays(uint (*delays)[MAX_UPDATE_SAMPLES], const size_t todo);
    void getSinusoidDelays(uint (*delays)[MAX_UPDATE_SAMPLES], const size_t todo);
    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;
    DEF_NEWDEL(ChorusState)
};
void ChorusState::deviceUpdate(const ALCdevice *Device, const Buffer&)
{
constexpr float max_delay{maxf(AL_CHORUS_MAX_DELAY, AL_FLANGER_MAX_DELAY)};
const auto frequency = static_cast<float>(Device->Frequency);
const size_t maxlen{NextPowerOf2(float2uint(max_delay*2.0f*frequency) + 1u)};
if(maxlen != mSampleBuffer.size())
al::vector<float,16>(maxlen).swap(mSampleBuffer);
std::fill(mSampleBuffer.begin(), mSampleBuffer.end(), 0.0f);
for(auto &e : mGains)
{
std::fill(std::begin(e.Current), std::end(e.Current), 0.0f);
std::fill(std::begin(e.Target), std::end(e.Target), 0.0f);
}
}
/* Updates the chorus parameters from the effect slot's properties. */
void ChorusState::update(const ALCcontext *Context, const EffectSlot *Slot,
    const EffectProps *props, const EffectTarget target)
{
    /* Minimum fixed-point delay needed to keep resampler padding available. */
    constexpr int mindelay{(MaxResamplerPadding>>1) << MixerFracBits};
    /* The LFO depth is scaled to be relative to the sample delay. Clamp the
     * delay and depth to allow enough padding for resampling.
     */
    const ALCdevice *device{Context->mDevice.get()};
    const auto frequency = static_cast<float>(device->Frequency);
    mWaveform = props->Chorus.Waveform;
    mDelay = maxi(float2int(props->Chorus.Delay*frequency*MixerFracOne + 0.5f), mindelay);
    mDepth = minf(props->Chorus.Depth * static_cast<float>(mDelay),
        static_cast<float>(mDelay - mindelay));
    mFeedback = props->Chorus.Feedback;
    /* Gains for left and right sides */
    const auto lcoeffs = CalcDirectionCoeffs({-1.0f, 0.0f, 0.0f}, 0.0f);
    const auto rcoeffs = CalcDirectionCoeffs({ 1.0f, 0.0f, 0.0f}, 0.0f);
    mOutTarget = target.Main->Buffer;
    ComputePanGains(target.Main, lcoeffs.data(), Slot->Gain, mGains[0].Target);
    ComputePanGains(target.Main, rcoeffs.data(), Slot->Gain, mGains[1].Target);
    float rate{props->Chorus.Rate};
    if(!(rate > 0.0f))
    {
        /* A non-positive rate disables the LFO. */
        mLfoOffset = 0;
        mLfoRange = 1;
        mLfoScale = 0.0f;
        mLfoDisp = 0;
    }
    else
    {
        /* Calculate LFO coefficient (number of samples per cycle). Limit the
         * max range to avoid overflow when calculating the displacement.
         */
        uint lfo_range{float2uint(minf(frequency/rate + 0.5f, float{INT_MAX/360 - 180}))};
        /* Rescale the current phase to the new range to keep it continuous. */
        mLfoOffset = mLfoOffset * lfo_range / mLfoRange;
        mLfoRange = lfo_range;
        switch(mWaveform)
        {
        case ChorusWaveform::Triangle:
            mLfoScale = 4.0f / static_cast<float>(mLfoRange);
            break;
        case ChorusWaveform::Sinusoid:
            mLfoScale = al::MathDefs<float>::Tau() / static_cast<float>(mLfoRange);
            break;
        }
        /* Calculate lfo phase displacement */
        int phase{props->Chorus.Phase};
        if(phase < 0) phase = 360 + phase;
        mLfoDisp = (mLfoRange*static_cast<uint>(phase) + 180) / 360;
    }
}
/* Computes both taps' per-sample fixed-point delays following a triangle
 * LFO, then advances the LFO phase by 'todo' samples. The second tap's
 * phase leads the first by mLfoDisp samples.
 */
void ChorusState::getTriangleDelays(uint (*delays)[MAX_UPDATE_SAMPLES], const size_t todo)
{
    const uint lfo_range{mLfoRange};
    const float lfo_scale{mLfoScale};
    const float depth{mDepth};
    const int delay{mDelay};
    ASSUME(lfo_range > 0);
    ASSUME(todo > 0);
    uint offset{mLfoOffset};
    for(size_t i{0};i < todo;++i)
    {
        offset = (offset+1)%lfo_range;
        /* The scaled phase sweeps 0..4, folded into a -1..+1 triangle. */
        const float lfo{1.0f - std::abs(2.0f - static_cast<float>(offset)*lfo_scale)};
        delays[0][i] = static_cast<uint>(fastf2i(lfo*depth) + delay);
    }
    offset = (mLfoOffset+mLfoDisp) % lfo_range;
    for(size_t i{0};i < todo;++i)
    {
        offset = (offset+1)%lfo_range;
        const float lfo{1.0f - std::abs(2.0f - static_cast<float>(offset)*lfo_scale)};
        delays[1][i] = static_cast<uint>(fastf2i(lfo*depth) + delay);
    }
    mLfoOffset = static_cast<uint>(mLfoOffset+todo) % lfo_range;
}
/* Computes both taps' per-sample fixed-point delays following a sinusoid
 * LFO, then advances the LFO phase by 'todo' samples.
 */
void ChorusState::getSinusoidDelays(uint (*delays)[MAX_UPDATE_SAMPLES], const size_t todo)
{
    const uint lfo_range{mLfoRange};
    const float lfo_scale{mLfoScale};
    const float depth{mDepth};
    const int delay{mDelay};
    ASSUME(lfo_range > 0);
    ASSUME(todo > 0);
    uint offset{mLfoOffset};
    /* Steps the LFO one sample and returns that tap delay, in fixed-point. */
    auto gen_lfo = [&offset,lfo_range,lfo_scale,depth,delay]() -> uint
    {
        offset = (offset+1)%lfo_range;
        const float offset_norm{static_cast<float>(offset) * lfo_scale};
        return static_cast<uint>(fastf2i(std::sin(offset_norm)*depth) + delay);
    };
    std::generate_n(delays[0], todo, gen_lfo);
    /* The second tap's phase leads the first by mLfoDisp samples. */
    offset = (mLfoOffset+mLfoDisp) % lfo_range;
    std::generate_n(delays[1], todo, gen_lfo);
    mLfoOffset = static_cast<uint>(mLfoOffset+todo) % lfo_range;
}
/* Renders the chorus: feeds input into the delay line, taps it at two
 * LFO-modulated delays (left and right), accumulates feedback, and mixes
 * both taps to the output channels.
 */
void ChorusState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    const size_t bufmask{mSampleBuffer.size()-1};
    const float feedback{mFeedback};
    const uint avgdelay{(static_cast<uint>(mDelay) + (MixerFracOne>>1)) >> MixerFracBits};
    float *RESTRICT delaybuf{mSampleBuffer.data()};
    uint offset{mOffset};
    /* Process in chunks of at most MAX_UPDATE_SAMPLES samples. */
    for(size_t base{0u};base < samplesToDo;)
    {
        const size_t todo{minz(MAX_UPDATE_SAMPLES, samplesToDo-base)};
        uint moddelays[2][MAX_UPDATE_SAMPLES];
        if(mWaveform == ChorusWaveform::Sinusoid)
            getSinusoidDelays(moddelays, todo);
        else /*if(mWaveform == ChorusWaveform::Triangle)*/
            getTriangleDelays(moddelays, todo);
        alignas(16) float temps[2][MAX_UPDATE_SAMPLES];
        for(size_t i{0u};i < todo;++i)
        {
            // Feed the buffer's input first (necessary for delays < 1).
            delaybuf[offset&bufmask] = samplesIn[0][base+i];
            // Tap for the left output.
            uint delay{offset - (moddelays[0][i]>>MixerFracBits)};
            float mu{static_cast<float>(moddelays[0][i]&MixerFracMask) * (1.0f/MixerFracOne)};
            temps[0][i] = cubic(delaybuf[(delay+1) & bufmask], delaybuf[(delay  ) & bufmask],
                delaybuf[(delay-1) & bufmask], delaybuf[(delay-2) & bufmask], mu);
            // Tap for the right output.
            delay = offset - (moddelays[1][i]>>MixerFracBits);
            mu = static_cast<float>(moddelays[1][i]&MixerFracMask) * (1.0f/MixerFracOne);
            temps[1][i] = cubic(delaybuf[(delay+1) & bufmask], delaybuf[(delay  ) & bufmask],
                delaybuf[(delay-1) & bufmask], delaybuf[(delay-2) & bufmask], mu);
            // Accumulate feedback from the average delay of the taps.
            delaybuf[offset&bufmask] += delaybuf[(offset-avgdelay) & bufmask] * feedback;
            ++offset;
        }
        /* Use size_t for the side index, consistent with the other loop
         * indices here (ALsizei is the legacy AL API type).
         */
        for(size_t c{0};c < 2;++c)
            MixSamples({temps[c], todo}, samplesOut, mGains[c].Current, mGains[c].Target,
                samplesToDo-base, base);
        base += todo;
    }
    mOffset = offset;
}
struct ChorusStateFactory final : public EffectStateFactory {
    /* Creates a fresh processing state for a chorus effect slot. */
    al::intrusive_ptr<EffectState> create() override
    {
        EffectState *state{new ChorusState{}};
        return al::intrusive_ptr<EffectState>{state};
    }
};
/* Flanger is basically a chorus with a really short delay. They can both use
* the same processing functions, so piggyback flanger on the chorus functions.
*/
struct FlangerStateFactory final : public EffectStateFactory {
    /* Flanger shares the chorus processing state (see comment above). */
    al::intrusive_ptr<EffectState> create() override
    {
        EffectState *state{new ChorusState{}};
        return al::intrusive_ptr<EffectState>{state};
    }
};
} // namespace
EffectStateFactory *ChorusStateFactory_getFactory()
{
    /* The factory is stateless, so one shared instance suffices. */
    static ChorusStateFactory factory{};
    return &factory;
}
EffectStateFactory *FlangerStateFactory_getFactory()
{
    /* The factory is stateless, so one shared instance suffices. */
    static FlangerStateFactory factory{};
    return &factory;
}

View file

@ -1,250 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Anis A. Hireche
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdlib.h>
#include "config.h"
#include "alError.h"
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alu.h"
/* Per-slot state for the legacy compressor (automatic gain control) effect. */
typedef struct ALcompressorState {
    DERIVE_FROM_TYPE(ALeffectState);

    /* Effect gains for each channel */
    ALfloat Gain[MAX_EFFECT_CHANNELS][MAX_OUTPUT_CHANNELS];

    /* Effect parameters */
    ALboolean Enabled;   /* AL_COMPRESSOR_ONOFF; gain is still smoothed when off */
    ALfloat AttackRate;  /* per-sample gain rise rate (set in deviceUpdate) */
    ALfloat ReleaseRate; /* per-sample gain fall rate (set in deviceUpdate) */
    ALfloat GainCtrl;    /* current gain-control value, carried across process calls */
} ALcompressorState;

/* Forward declarations for the vtable entries below. */
static ALvoid ALcompressorState_Destruct(ALcompressorState *state);
static ALboolean ALcompressorState_deviceUpdate(ALcompressorState *state, ALCdevice *device);
static ALvoid ALcompressorState_update(ALcompressorState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
static ALvoid ALcompressorState_process(ALcompressorState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALcompressorState)

DEFINE_ALEFFECTSTATE_VTABLE(ALcompressorState);
/* Construct the state: initialize the base object and vtable, then set
 * defaults (enabled, unity gain control; rates are filled in later by
 * deviceUpdate).
 */
static void ALcompressorState_Construct(ALcompressorState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALcompressorState, ALeffectState, state);

    state->Enabled = AL_TRUE;
    state->AttackRate = 0.0f;
    state->ReleaseRate = 0.0f;
    state->GainCtrl = 1.0f;
}
/* Destroy the state; just forwards to the base-class destructor. */
static ALvoid ALcompressorState_Destruct(ALcompressorState *state)
{
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
static ALboolean ALcompressorState_deviceUpdate(ALcompressorState *state, ALCdevice *device)
{
    /* Derive per-sample attack/release rates from the device sample rate:
     * a full attack takes 200ms and a full release takes 400ms.
     */
    state->AttackRate = 1.0f / (device->Frequency * 0.2f);
    state->ReleaseRate = 1.0f / (device->Frequency * 0.4f);

    return AL_TRUE;
}
/* Update from the slot's current properties: latch the on/off state and
 * compute first-order ambisonic panning gains to the device's FOA output
 * for each of the 4 B-Format channels.
 */
static ALvoid ALcompressorState_update(ALcompressorState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props)
{
    const ALCdevice *device = context->Device;
    ALuint i;

    state->Enabled = props->Compressor.OnOff;

    STATIC_CAST(ALeffectState,state)->OutBuffer = device->FOAOut.Buffer;
    STATIC_CAST(ALeffectState,state)->OutChannels = device->FOAOut.NumChannels;
    /* Identity matrix rows: the signal is compressed, not re-oriented. */
    for(i = 0;i < 4;i++)
        ComputeFirstOrderGains(&device->FOAOut, IdentityMatrixf.m[i],
                               slot->Params.Gain, state->Gain[i]);
}
/* Process SamplesToDo frames: track a rough amplitude envelope of the
 * 4-channel input, smooth it with the attack/release rates, normalize the
 * signal by the clamped gain's reciprocal, then mix to the output with the
 * panning gains from update(). Works in chunks of up to 64 frames.
 */
static ALvoid ALcompressorState_process(ALcompressorState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    ALsizei i, j, k;
    ALsizei base;

    for(base = 0;base < SamplesToDo;)
    {
        ALfloat temps[64][4];
        ALsizei td = mini(64, SamplesToDo-base);

        /* Load samples into the temp buffer first. */
        for(j = 0;j < 4;j++)
        {
            for(i = 0;i < td;i++)
                temps[i][j] = SamplesIn[j][i+base];
        }

        if(state->Enabled)
        {
            ALfloat gain = state->GainCtrl;
            ALfloat output, amplitude;

            for(i = 0;i < td;i++)
            {
                /* Roughly calculate the maximum amplitude from the 4-channel
                 * signal, and attack or release the gain control to reach it.
                 */
                amplitude = fabsf(temps[i][0]);
                amplitude = maxf(amplitude + fabsf(temps[i][1]),
                                 maxf(amplitude + fabsf(temps[i][2]),
                                      amplitude + fabsf(temps[i][3])));
                if(amplitude > gain)
                    gain = minf(gain+state->AttackRate, amplitude);
                else if(amplitude < gain)
                    gain = maxf(gain-state->ReleaseRate, amplitude);

                /* Apply the inverse of the gain control to normalize/compress
                 * the volume. */
                output = 1.0f / clampf(gain, 0.5f, 2.0f);
                for(j = 0;j < 4;j++)
                    temps[i][j] *= output;
            }

            /* Carry the gain control across process calls. */
            state->GainCtrl = gain;
        }
        else
        {
            ALfloat gain = state->GainCtrl;
            ALfloat output, amplitude;

            for(i = 0;i < td;i++)
            {
                /* Same as above, except the amplitude is forced to 1. This
                 * helps ensure smooth gain changes when the compressor is
                 * turned on and off.
                 */
                amplitude = 1.0f;
                if(amplitude > gain)
                    gain = minf(gain+state->AttackRate, amplitude);
                else if(amplitude < gain)
                    gain = maxf(gain-state->ReleaseRate, amplitude);

                output = 1.0f / clampf(gain, 0.5f, 2.0f);
                for(j = 0;j < 4;j++)
                    temps[i][j] *= output;
            }

            state->GainCtrl = gain;
        }

        /* Now mix to the output. */
        for(j = 0;j < 4;j++)
        {
            for(k = 0;k < NumChannels;k++)
            {
                ALfloat gain = state->Gain[j][k];
                /* Skip output channels with silent gain. */
                if(!(fabsf(gain) > GAIN_SILENCE_THRESHOLD))
                    continue;

                for(i = 0;i < td;i++)
                    SamplesOut[k][base+i] += gain * temps[i][j];
            }
        }

        base += td;
    }
}
/* Factory for compressor effect states. */
typedef struct CompressorStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} CompressorStateFactory;

/* Allocate and construct a new compressor state (NULL on allocation failure). */
static ALeffectState *CompressorStateFactory_create(CompressorStateFactory *UNUSED(factory))
{
    ALcompressorState *state;

    NEW_OBJ0(state, ALcompressorState)();
    if(!state) return NULL;

    return STATIC_CAST(ALeffectState, state);
}

DEFINE_EFFECTSTATEFACTORY_VTABLE(CompressorStateFactory);

/* Return the shared, statically-allocated factory instance. */
EffectStateFactory *CompressorStateFactory_getFactory(void)
{
    static CompressorStateFactory CompressorFactory = { { GET_VTABLE2(CompressorStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &CompressorFactory);
}
/* Set an integer property (only AL_COMPRESSOR_ONOFF is defined); raises
 * AL_INVALID_VALUE for out-of-range values, AL_INVALID_ENUM otherwise.
 */
void ALcompressor_setParami(ALeffect *effect, ALCcontext *context, ALenum param, ALint val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_COMPRESSOR_ONOFF:
            if(!(val >= AL_COMPRESSOR_MIN_ONOFF && val <= AL_COMPRESSOR_MAX_ONOFF))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Compressor state out of range");
            props->Compressor.OnOff = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid compressor integer property 0x%04x",
                       param);
    }
}
/* Vector form forwards the first element to the scalar setter. */
void ALcompressor_setParamiv(ALeffect *effect, ALCcontext *context, ALenum param, const ALint *vals)
{ ALcompressor_setParami(effect, context, param, vals[0]); }
/* The compressor has no float properties; these always raise AL_INVALID_ENUM. */
void ALcompressor_setParamf(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALfloat UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid compressor float property 0x%04x", param); }
void ALcompressor_setParamfv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALfloat *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid compressor float-vector property 0x%04x", param); }

/* Query an integer property (only AL_COMPRESSOR_ONOFF is defined). */
void ALcompressor_getParami(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *val)
{ 
    const ALeffectProps *props = &effect->Props;
    switch(param)
    { 
        case AL_COMPRESSOR_ONOFF:
            *val = props->Compressor.OnOff;
            break;
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid compressor integer property 0x%04x",
                       param);
    }
}
/* Vector form forwards to the scalar getter. */
void ALcompressor_getParamiv(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *vals)
{ ALcompressor_getParami(effect, context, param, vals); }
/* No float properties; these always raise AL_INVALID_ENUM. */
void ALcompressor_getParamf(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALfloat *UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid compressor float property 0x%04x", param); }
void ALcompressor_getParamfv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALfloat *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid compressor float-vector property 0x%04x", param); }

DEFINE_ALEFFECT_VTABLE(ALcompressor);

View file

@ -0,0 +1,168 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Anis A. Hireche
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cstdlib>
#include "alcmain.h"
#include "alcontext.h"
#include "alu.h"
#include "effectslot.h"
#include "vecmat.h"
namespace {
/* Envelope-follower bounds and attack/release timing for the automatic gain
 * control. Typed constexpr constants instead of macros keep them scoped to
 * this translation unit and type-checked (names unchanged for the code
 * below).
 */
constexpr float AMP_ENVELOPE_MIN{0.5f};
constexpr float AMP_ENVELOPE_MAX{2.0f};

constexpr float ATTACK_TIME{0.1f};  /* 100ms to rise from min to max */
constexpr float RELEASE_TIME{0.2f}; /* 200ms to drop from max to min */
/* Per-slot state for the compressor (automatic gain control) effect. */
struct CompressorState final : public EffectState {
    /* Effect gains for each channel */
    float mGain[MaxAmbiChannels][MAX_OUTPUT_CHANNELS]{};

    /* Effect parameters */
    bool mEnabled{true};     /* AL_COMPRESSOR_ONOFF; envelope is still tracked when off */
    float mAttackMult{1.0f}; /* per-sample envelope growth multiplier (set in deviceUpdate) */
    float mReleaseMult{1.0f};/* per-sample envelope decay multiplier (set in deviceUpdate) */
    float mEnvFollower{1.0f};/* current envelope value, carried across process calls */


    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(CompressorState)
};
void CompressorState::deviceUpdate(const ALCdevice *device, const Buffer&)
{
    /* Number of samples to do a full attack and release (non-integer sample
     * counts are okay). Then derive per-sample multipliers that traverse the
     * envelope's full min<->max span at those rates.
     */
    const float srate{static_cast<float>(device->Frequency)};
    mAttackMult = std::pow(AMP_ENVELOPE_MAX/AMP_ENVELOPE_MIN, 1.0f/(srate*ATTACK_TIME));
    mReleaseMult = std::pow(AMP_ENVELOPE_MIN/AMP_ENVELOPE_MAX, 1.0f/(srate*RELEASE_TIME));
}
/* Update from the slot's current properties: latch the on/off state and
 * compute panning gains for each wet-buffer channel using identity
 * ambisonic coefficients (via SetAmbiPanIdentity) — the signal is
 * compressed in place, not re-oriented.
 */
void CompressorState::update(const ALCcontext*, const EffectSlot *slot,
    const EffectProps *props, const EffectTarget target)
{
    mEnabled = props->Compressor.OnOff;

    mOutTarget = target.Main->Buffer;
    auto set_gains = [slot,target](auto &gains, al::span<const float,MaxAmbiChannels> coeffs)
    { ComputePanGains(target.Main, coeffs.data(), slot->Gain, gains); };
    SetAmbiPanIdentity(std::begin(mGain), slot->Wet.Buffer.size(), set_gains);
}
/* Process samplesToDo frames: follow the amplitude envelope of the first
 * input channel, generate per-sample normalization gains from its
 * reciprocal, and mix every input channel to the outputs with the panning
 * gains from update(). Works in chunks of up to 256 frames.
 */
void CompressorState::process(const size_t samplesToDo,
    const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    for(size_t base{0u};base < samplesToDo;)
    {
        float gains[256];
        const size_t td{minz(256, samplesToDo-base)};

        /* Generate the per-sample gains from the signal envelope. */
        float env{mEnvFollower};
        if(mEnabled)
        {
            for(size_t i{0u};i < td;++i)
            {
                /* Clamp the absolute amplitude to the defined envelope limits,
                 * then attack or release the envelope to reach it.
                 */
                const float amplitude{clampf(std::fabs(samplesIn[0][base+i]), AMP_ENVELOPE_MIN,
                    AMP_ENVELOPE_MAX)};
                if(amplitude > env)
                    env = minf(env*mAttackMult, amplitude);
                else if(amplitude < env)
                    env = maxf(env*mReleaseMult, amplitude);

                /* Apply the reciprocal of the envelope to normalize the volume
                 * (compress the dynamic range).
                 */
                gains[i] = 1.0f / env;
            }
        }
        else
        {
            /* Same as above, except the amplitude is forced to 1. This helps
             * ensure smooth gain changes when the compressor is turned on and
             * off.
             */
            for(size_t i{0u};i < td;++i)
            {
                const float amplitude{1.0f};
                if(amplitude > env)
                    env = minf(env*mAttackMult, amplitude);
                else if(amplitude < env)
                    env = maxf(env*mReleaseMult, amplitude);

                gains[i] = 1.0f / env;
            }
        }
        /* Carry the envelope across process calls. */
        mEnvFollower = env;

        /* Now compress the signal amplitude to output. */
        auto changains = std::addressof(mGain[0]);
        for(const auto &input : samplesIn)
        {
            const float *outgains{*(changains++)};
            for(FloatBufferLine &output : samplesOut)
            {
                const float gain{*(outgains++)};
                /* Skip silent output channels. */
                if(!(std::fabs(gain) > GainSilenceThreshold))
                    continue;

                for(size_t i{0u};i < td;i++)
                    output[base+i] += input[base+i] * gains[i] * gain;
            }
        }

        base += td;
    }
}
struct CompressorStateFactory final : public EffectStateFactory {
    /* Creates a fresh processing state for a compressor effect slot. */
    al::intrusive_ptr<EffectState> create() override
    {
        EffectState *state{new CompressorState{}};
        return al::intrusive_ptr<EffectState>{state};
    }
};
} // namespace
EffectStateFactory *CompressorStateFactory_getFactory()
{
    /* The factory is stateless, so one shared instance suffices. */
    static CompressorStateFactory factory{};
    return &factory;
}

View file

@ -0,0 +1,567 @@
#include "config.h"
#include <stdint.h>
#ifdef HAVE_SSE_INTRINSICS
#include <xmmintrin.h>
#elif defined(HAVE_NEON)
#include <arm_neon.h>
#endif
#include "alcmain.h"
#include "alcomplex.h"
#include "alcontext.h"
#include "almalloc.h"
#include "alspan.h"
#include "bformatdec.h"
#include "buffer_storage.h"
#include "core/ambidefs.h"
#include "core/filters/splitter.h"
#include "core/fmt_traits.h"
#include "core/logging.h"
#include "effects/base.h"
#include "effectslot.h"
#include "math_defs.h"
#include "polyphase_resampler.h"
namespace {
/* Convolution reverb is implemented using a segmented overlap-add method. The
* impulse response is broken up into multiple segments of 128 samples, and
* each segment has an FFT applied with a 256-sample buffer (the latter half
* left silent) to get its frequency-domain response. The resulting response
* has its positive/non-mirrored frequencies saved (129 bins) in each segment.
*
* Input samples are similarly broken up into 128-sample segments, with an FFT
* applied to each new incoming segment to get its 129 bins. A history of FFT'd
* input segments is maintained, equal to the length of the impulse response.
*
* To apply the reverberation, each impulse response segment is convolved with
* its paired input segment (using complex multiplies, far cheaper than FIRs),
* accumulating into a 256-bin FFT buffer. The input history is then shifted to
* align with later impulse response segments for next time.
*
* An inverse FFT is then applied to the accumulated FFT buffer to get a 256-
* sample time-domain response for output, which is split in two halves. The
* first half is the 128-sample output, and the second half is a 128-sample
* (really, 127) delayed extension, which gets added to the output next time.
* Convolving two time-domain responses of lengths N and M results in a time-
* domain signal of length N+M-1, and this holds true regardless of the
* convolution being applied in the frequency domain, so these "overflow"
* samples need to be accounted for.
*
* To avoid a delay with gathering enough input samples to apply an FFT with,
* the first segment is applied directly in the time-domain as the samples come
* in. Once enough have been retrieved, the FFT is applied on the input and
* it's paired with the remaining (FFT'd) filter segments for processing.
*/
/* Convert `samples` frames of one channel from `src` (stepping `srcstep`
 * samples between frames) into double precision at `dst`, dispatching on
 * the stored sample format.
 */
void LoadSamples(double *RESTRICT dst, const al::byte *src, const size_t srcstep, FmtType srctype,
    const size_t samples) noexcept
{
    switch(srctype)
    {
    case FmtUByte: al::LoadSampleArray<FmtUByte>(dst, src, srcstep, samples); break;
    case FmtShort: al::LoadSampleArray<FmtShort>(dst, src, srcstep, samples); break;
    case FmtFloat: al::LoadSampleArray<FmtFloat>(dst, src, srcstep, samples); break;
    case FmtDouble: al::LoadSampleArray<FmtDouble>(dst, src, srcstep, samples); break;
    case FmtMulaw: al::LoadSampleArray<FmtMulaw>(dst, src, srcstep, samples); break;
    case FmtAlaw: al::LoadSampleArray<FmtAlaw>(dst, src, srcstep, samples); break;
    }
}
/* Map the buffer's ambisonic scaling convention to its conversion table
 * (anything unrecognized falls back to N3D).
 */
inline auto& GetAmbiScales(AmbiScaling scaletype) noexcept
{
    switch(scaletype)
    {
    case AmbiScaling::FuMa: return AmbiScale::FromFuMa();
    case AmbiScaling::SN3D: return AmbiScale::FromSN3D();
    default: break;
    }
    return AmbiScale::FromN3D();
}
/* Map the buffer's ambisonic channel ordering to its index table. */
inline auto& GetAmbiLayout(AmbiLayout layouttype) noexcept
{
    return (layouttype == AmbiLayout::FuMa) ? AmbiIndex::FromFuMa() : AmbiIndex::FromACN();
}
/* As GetAmbiLayout, but for 2D (horizontal-only) ambisonic buffers. */
inline auto& GetAmbi2DLayout(AmbiLayout layouttype) noexcept
{
    return (layouttype == AmbiLayout::FuMa) ? AmbiIndex::FromFuMa2D() : AmbiIndex::FromACN2D();
}
/* Speaker position used to pan one channel of a non-ambisonic IR. */
struct ChanMap {
    Channel channel;
    float angle;     /* azimuth in radians (built with Deg2Rad below) */
    float elevation; /* radians */
};

using complex_d = std::complex<double>;

/* FFT size per segment; half carries signal, half is zero padding. */
constexpr size_t ConvolveUpdateSize{256};
constexpr size_t ConvolveUpdateSamples{ConvolveUpdateSize / 2};
/* Apply the ConvolveUpdateSamples-tap time-domain FIR filter to the input:
 * for each output sample, dst[i] = sum_j src[i+j]*filter[j]. `src` must
 * have ConvolveUpdateSamples samples readable past each output position.
 */
void apply_fir(al::span<float> dst, const float *RESTRICT src, const float *RESTRICT filter)
{
#ifdef HAVE_SSE_INTRINSICS
    /* SSE: accumulate four taps at a time, then horizontally sum the
     * vector for each output sample.
     */
    for(float &output : dst)
    {
        __m128 r4{_mm_setzero_ps()};
        for(size_t j{0};j < ConvolveUpdateSamples;j+=4)
        {
            const __m128 coeffs{_mm_load_ps(&filter[j])};
            const __m128 s{_mm_loadu_ps(&src[j])};
            r4 = _mm_add_ps(r4, _mm_mul_ps(s, coeffs));
        }
        r4 = _mm_add_ps(r4, _mm_shuffle_ps(r4, r4, _MM_SHUFFLE(0, 1, 2, 3)));
        r4 = _mm_add_ps(r4, _mm_movehl_ps(r4, r4));
        output = _mm_cvtss_f32(r4);

        ++src;
    }

#elif defined(HAVE_NEON)

    /* NEON: same four-wide multiply-accumulate with a pairwise sum. */
    for(float &output : dst)
    {
        float32x4_t r4{vdupq_n_f32(0.0f)};
        for(size_t j{0};j < ConvolveUpdateSamples;j+=4)
            r4 = vmlaq_f32(r4, vld1q_f32(&src[j]), vld1q_f32(&filter[j]));
        r4 = vaddq_f32(r4, vrev64q_f32(r4));
        output = vget_lane_f32(vadd_f32(vget_low_f32(r4), vget_high_f32(r4)), 0);

        ++src;
    }

#else

    /* Generic scalar fallback. */
    for(float &output : dst)
    {
        float ret{0.0f};
        for(size_t j{0};j < ConvolveUpdateSamples;++j)
            ret += src[j] * filter[j];
        output = ret;
        ++src;
    }
#endif
}
/* Segmented overlap-add convolution state (see the file comment above for
 * the algorithm overview).
 */
struct ConvolutionState final : public EffectState {
    /* Format of the loaded impulse-response buffer. */
    FmtChannels mChannels{};
    AmbiLayout mAmbiLayout{};
    AmbiScaling mAmbiScaling{};
    uint mAmbiOrder{};

    /* Fill position within the current input segment. */
    size_t mFifoPos{0};
    /* Input FIFO: first half is the previous segment, second half collects
     * new samples.
     */
    std::array<float,ConvolveUpdateSamples*2> mInput{};
    /* Per-channel time-domain FIR taps (the IR's first segment, reversed). */
    al::vector<std::array<float,ConvolveUpdateSamples>,16> mFilter;
    /* Per-channel output; the second half holds overlap added next round. */
    al::vector<std::array<float,ConvolveUpdateSamples*2>,16> mOutput;

    /* Scratch buffer for forward/inverse FFTs. */
    alignas(16) std::array<complex_d,ConvolveUpdateSize> mFftBuffer{};

    /* Ring position into the FFT'd input history, and segment count. */
    size_t mCurrentSegment{0};
    size_t mNumConvolveSegs{0};

    struct ChannelData {
        alignas(16) FloatBufferLine mBuffer{};
        float mHfScale{};
        BandSplitter mFilter{};

        float Current[MAX_OUTPUT_CHANNELS]{};
        float Target[MAX_OUTPUT_CHANNELS]{};
    };
    using ChannelDataArray = al::FlexArray<ChannelData>;
    std::unique_ptr<ChannelDataArray> mChans;
    /* FFT'd input history followed by the per-channel filter segments. */
    std::unique_ptr<complex_d[]> mComplexData;


    ConvolutionState() = default;
    ~ConvolutionState() override = default;

    void NormalMix(const al::span<FloatBufferLine> samplesOut, const size_t samplesToDo);
    void UpsampleMix(const al::span<FloatBufferLine> samplesOut, const size_t samplesToDo);
    /* Selected by update(): plain mix, or HF-scaled mix for order upsampling. */
    void (ConvolutionState::*mMix)(const al::span<FloatBufferLine>,const size_t)
    {&ConvolutionState::NormalMix};

    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(ConvolutionState)
};
void ConvolutionState::NormalMix(const al::span<FloatBufferLine> samplesOut,
    const size_t samplesToDo)
{
    /* Mix each channel's convolved result straight to the outputs. */
    for(auto &chandata : *mChans)
    {
        const al::span<float> samples{chandata.mBuffer.data(), samplesToDo};
        MixSamples(samples, samplesOut, chandata.Current, chandata.Target, samplesToDo, 0);
    }
}
void ConvolutionState::UpsampleMix(const al::span<FloatBufferLine> samplesOut,
    const size_t samplesToDo)
{
    /* Scale each channel's high-frequency band (for ambisonic order
     * upsampling) before mixing to the outputs.
     */
    for(auto &chandata : *mChans)
    {
        const al::span<float> samples{chandata.mBuffer.data(), samplesToDo};
        chandata.mFilter.processHfScale(samples, chandata.mHfScale);
        MixSamples(samples, samplesOut, chandata.Current, chandata.Target, samplesToDo, 0);
    }
}
/* (Re)build the convolution filter for the current device: reset all
 * processing state, then load the IR buffer, resample it to the device
 * rate, store the first segment as a reversed time-domain FIR, and FFT the
 * remaining segments into per-channel frequency-domain filters.
 */
void ConvolutionState::deviceUpdate(const ALCdevice *device, const Buffer &buffer)
{
    /* Limit the IR to first-order ambisonics for now. */
    constexpr uint MaxConvolveAmbiOrder{1u};

    mFifoPos = 0;
    mInput.fill(0.0f);
    decltype(mFilter){}.swap(mFilter);
    decltype(mOutput){}.swap(mOutput);
    mFftBuffer.fill(complex_d{});

    mCurrentSegment = 0;
    mNumConvolveSegs = 0;

    mChans = nullptr;
    mComplexData = nullptr;

    /* An empty buffer doesn't need a convolution filter. */
    if(!buffer.storage || buffer.storage->mSampleLen < 1) return;

    /* m = number of non-mirrored frequency bins per segment. */
    constexpr size_t m{ConvolveUpdateSize/2 + 1};

    auto bytesPerSample = BytesFromFmt(buffer.storage->mType);
    auto realChannels = ChannelsFromFmt(buffer.storage->mChannels, buffer.storage->mAmbiOrder);
    auto numChannels = ChannelsFromFmt(buffer.storage->mChannels,
        minu(buffer.storage->mAmbiOrder, MaxConvolveAmbiOrder));

    mChans = ChannelDataArray::Create(numChannels);

    /* The impulse response needs to have the same sample rate as the input and
     * output. The bsinc24 resampler is decent, but there is high-frequency
     * attenuation that some people may be able to pick up on. Since this is
     * called very infrequently, go ahead and use the polyphase resampler.
     */
    PPhaseResampler resampler;
    if(device->Frequency != buffer.storage->mSampleRate)
        resampler.init(buffer.storage->mSampleRate, device->Frequency);
    /* IR length after resampling, rounded up. */
    const auto resampledCount = static_cast<uint>(
        (uint64_t{buffer.storage->mSampleLen}*device->Frequency+(buffer.storage->mSampleRate-1)) /
        buffer.storage->mSampleRate);

    const BandSplitter splitter{device->mXOverFreq / static_cast<float>(device->Frequency)};
    for(auto &e : *mChans)
        e.mFilter = splitter;

    mFilter.resize(numChannels, {});
    mOutput.resize(numChannels, {});

    /* Calculate the number of segments needed to hold the impulse response and
     * the input history (rounded up), and allocate them. Exclude one segment
     * which gets applied as a time-domain FIR filter. Make sure at least one
     * segment is allocated to simplify handling.
     */
    mNumConvolveSegs = (resampledCount+(ConvolveUpdateSamples-1)) / ConvolveUpdateSamples;
    mNumConvolveSegs = maxz(mNumConvolveSegs, 2) - 1;

    /* One extra "channel" worth of segments holds the FFT'd input history. */
    const size_t complex_length{mNumConvolveSegs * m * (numChannels+1)};
    mComplexData = std::make_unique<complex_d[]>(complex_length);
    std::fill_n(mComplexData.get(), complex_length, complex_d{});

    mChannels = buffer.storage->mChannels;
    mAmbiLayout = buffer.storage->mAmbiLayout;
    mAmbiScaling = buffer.storage->mAmbiScaling;
    mAmbiOrder = minu(buffer.storage->mAmbiOrder, MaxConvolveAmbiOrder);

    auto srcsamples = std::make_unique<double[]>(maxz(buffer.storage->mSampleLen, resampledCount));
    complex_d *filteriter = mComplexData.get() + mNumConvolveSegs*m;
    for(size_t c{0};c < numChannels;++c)
    {
        /* Load the samples from the buffer, and resample to match the device. */
        LoadSamples(srcsamples.get(), buffer.samples.data() + bytesPerSample*c, realChannels,
            buffer.storage->mType, buffer.storage->mSampleLen);
        if(device->Frequency != buffer.storage->mSampleRate)
            resampler.process(buffer.storage->mSampleLen, srcsamples.get(), resampledCount,
                srcsamples.get());

        /* Store the first segment's samples in reverse in the time-domain, to
         * apply as a FIR filter.
         */
        const size_t first_size{minz(resampledCount, ConvolveUpdateSamples)};
        std::transform(srcsamples.get(), srcsamples.get()+first_size, mFilter[c].rbegin(),
            [](const double d) noexcept -> float { return static_cast<float>(d); });

        /* FFT the remaining segments (zero-padded) and keep their m bins. */
        size_t done{first_size};
        for(size_t s{0};s < mNumConvolveSegs;++s)
        {
            const size_t todo{minz(resampledCount-done, ConvolveUpdateSamples)};

            auto iter = std::copy_n(&srcsamples[done], todo, mFftBuffer.begin());
            done += todo;
            std::fill(iter, mFftBuffer.end(), complex_d{});

            forward_fft(mFftBuffer);
            filteriter = std::copy_n(mFftBuffer.cbegin(), m, filteriter);
        }
    }
}
/* Update from the slot's current properties: choose the mixing method and
 * compute per-channel target panning gains, either as an ambisonic
 * transform (B-Format IRs, possibly order-upsampled) or as per-speaker
 * panned channels (all other IR layouts).
 */
void ConvolutionState::update(const ALCcontext *context, const EffectSlot *slot,
    const EffectProps* /*props*/, const EffectTarget target)
{
    /* NOTE: Stereo and Rear are slightly different from normal mixing (as
     * defined in alu.cpp). These are 45 degrees from center, rather than the
     * 30 degrees used there.
     *
     * TODO: LFE is not mixed to output. This will require each buffer channel
     * to have its own output target since the main mixing buffer won't have an
     * LFE channel (due to being B-Format).
     */
    static const ChanMap MonoMap[1]{
        { FrontCenter, 0.0f, 0.0f }
    }, StereoMap[2]{
        { FrontLeft,  Deg2Rad(-45.0f), Deg2Rad(0.0f) },
        { FrontRight, Deg2Rad( 45.0f), Deg2Rad(0.0f) }
    }, RearMap[2]{
        { BackLeft,  Deg2Rad(-135.0f), Deg2Rad(0.0f) },
        { BackRight, Deg2Rad( 135.0f), Deg2Rad(0.0f) }
    }, QuadMap[4]{
        { FrontLeft,  Deg2Rad( -45.0f), Deg2Rad(0.0f) },
        { FrontRight, Deg2Rad(  45.0f), Deg2Rad(0.0f) },
        { BackLeft,   Deg2Rad(-135.0f), Deg2Rad(0.0f) },
        { BackRight,  Deg2Rad( 135.0f), Deg2Rad(0.0f) }
    }, X51Map[6]{
        { FrontLeft,   Deg2Rad( -30.0f), Deg2Rad(0.0f) },
        { FrontRight,  Deg2Rad(  30.0f), Deg2Rad(0.0f) },
        { FrontCenter, Deg2Rad(   0.0f), Deg2Rad(0.0f) },
        { LFE, 0.0f, 0.0f },
        { SideLeft,    Deg2Rad(-110.0f), Deg2Rad(0.0f) },
        { SideRight,   Deg2Rad( 110.0f), Deg2Rad(0.0f) }
    }, X61Map[7]{
        { FrontLeft,   Deg2Rad(-30.0f), Deg2Rad(0.0f) },
        { FrontRight,  Deg2Rad( 30.0f), Deg2Rad(0.0f) },
        { FrontCenter, Deg2Rad(  0.0f), Deg2Rad(0.0f) },
        { LFE, 0.0f, 0.0f },
        { BackCenter,  Deg2Rad(180.0f), Deg2Rad(0.0f) },
        { SideLeft,    Deg2Rad(-90.0f), Deg2Rad(0.0f) },
        { SideRight,   Deg2Rad( 90.0f), Deg2Rad(0.0f) }
    }, X71Map[8]{
        { FrontLeft,   Deg2Rad( -30.0f), Deg2Rad(0.0f) },
        { FrontRight,  Deg2Rad(  30.0f), Deg2Rad(0.0f) },
        { FrontCenter, Deg2Rad(   0.0f), Deg2Rad(0.0f) },
        { LFE, 0.0f, 0.0f },
        { BackLeft,    Deg2Rad(-150.0f), Deg2Rad(0.0f) },
        { BackRight,   Deg2Rad( 150.0f), Deg2Rad(0.0f) },
        { SideLeft,    Deg2Rad( -90.0f), Deg2Rad(0.0f) },
        { SideRight,   Deg2Rad(  90.0f), Deg2Rad(0.0f) }
    };

    /* No filter loaded: nothing to pan. */
    if(mNumConvolveSegs < 1)
        return;

    mMix = &ConvolutionState::NormalMix;

    for(auto &chan : *mChans)
        std::fill(std::begin(chan.Target), std::end(chan.Target), 0.0f);
    const float gain{slot->Gain};
    if(mChannels == FmtBFormat3D || mChannels == FmtBFormat2D)
    {
        ALCdevice *device{context->mDevice.get()};
        /* If the device renders at a higher ambisonic order than the IR,
         * scale the higher-order content and use the upsampling mixer.
         */
        if(device->mAmbiOrder > mAmbiOrder)
        {
            mMix = &ConvolutionState::UpsampleMix;
            const auto scales = BFormatDec::GetHFOrderScales(mAmbiOrder, device->mAmbiOrder);
            (*mChans)[0].mHfScale = scales[0];
            for(size_t i{1};i < mChans->size();++i)
                (*mChans)[i].mHfScale = scales[1];
        }
        mOutTarget = target.Main->Buffer;

        /* Pan each B-Format channel by its (re-ordered, re-scaled) ACN
         * coefficient.
         */
        auto&& scales = GetAmbiScales(mAmbiScaling);
        const uint8_t *index_map{(mChannels == FmtBFormat2D) ?
            GetAmbi2DLayout(mAmbiLayout).data() :
            GetAmbiLayout(mAmbiLayout).data()};

        std::array<float,MaxAmbiChannels> coeffs{};
        for(size_t c{0u};c < mChans->size();++c)
        {
            const size_t acn{index_map[c]};
            coeffs[acn] = scales[acn];
            ComputePanGains(target.Main, coeffs.data(), gain, (*mChans)[c].Target);
            coeffs[acn] = 0.0f;
        }
    }
    else
    {
        ALCdevice *device{context->mDevice.get()};
        al::span<const ChanMap> chanmap{};
        switch(mChannels)
        {
        case FmtMono: chanmap = MonoMap; break;
        case FmtStereo: chanmap = StereoMap; break;
        case FmtRear: chanmap = RearMap; break;
        case FmtQuad: chanmap = QuadMap; break;
        case FmtX51: chanmap = X51Map; break;
        case FmtX61: chanmap = X61Map; break;
        case FmtX71: chanmap = X71Map; break;
        case FmtBFormat2D:
        case FmtBFormat3D:
            break;
        }

        mOutTarget = target.Main->Buffer;
        if(device->mRenderMode == RenderMode::Pairwise)
        {
            /* Scale frontal azimuths outward for stereo-pair rendering,
             * clamped to +/-90 degrees.
             */
            auto ScaleAzimuthFront = [](float azimuth, float scale) -> float
            {
                const float abs_azi{std::fabs(azimuth)};
                if(!(abs_azi >= al::MathDefs<float>::Pi()*0.5f))
                    return std::copysign(minf(abs_azi*scale, al::MathDefs<float>::Pi()*0.5f), azimuth);
                return azimuth;
            };

            for(size_t i{0};i < chanmap.size();++i)
            {
                if(chanmap[i].channel == LFE) continue;
                const auto coeffs = CalcAngleCoeffs(ScaleAzimuthFront(chanmap[i].angle, 2.0f),
                    chanmap[i].elevation, 0.0f);
                ComputePanGains(target.Main, coeffs.data(), gain, (*mChans)[i].Target);
            }
        }
        else for(size_t i{0};i < chanmap.size();++i)
        {
            if(chanmap[i].channel == LFE) continue;
            const auto coeffs = CalcAngleCoeffs(chanmap[i].angle, chanmap[i].elevation, 0.0f);
            ComputePanGains(target.Main, coeffs.data(), gain, (*mChans)[i].Target);
        }
    }
}
/* Process samplesToDo input samples through the segmented overlap-add
 * convolution (see the file comment above): the first IR segment is
 * applied as a time-domain FIR as input arrives; each time a full segment
 * of input has accumulated, it's FFT'd into the input history and
 * convolved against the remaining frequency-domain IR segments.
 */
void ConvolutionState::process(const size_t samplesToDo,
    const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    /* No filter loaded: produce no output. */
    if(mNumConvolveSegs < 1)
        return;

    /* Non-mirrored frequency bins per segment. */
    constexpr size_t m{ConvolveUpdateSize/2 + 1};
    size_t curseg{mCurrentSegment};
    auto &chans = *mChans;

    for(size_t base{0u};base < samplesToDo;)
    {
        /* Take however many samples fit in the current input segment. */
        const size_t todo{minz(ConvolveUpdateSamples-mFifoPos, samplesToDo-base)};
        std::copy_n(samplesIn[0].begin() + base, todo,
            mInput.begin()+ConvolveUpdateSamples+mFifoPos);

        /* Apply the FIR for the newly retrieved input samples, and combine it
         * with the inverse FFT'd output samples.
         */
        for(size_t c{0};c < chans.size();++c)
        {
            auto buf_iter = chans[c].mBuffer.begin() + base;
            apply_fir({std::addressof(*buf_iter), todo}, mInput.data()+1 + mFifoPos,
                mFilter[c].data());

            auto fifo_iter = mOutput[c].begin() + mFifoPos;
            std::transform(fifo_iter, fifo_iter+todo, buf_iter, buf_iter, std::plus<>{});
        }

        mFifoPos += todo;
        base += todo;

        /* Check whether the input buffer is filled with new samples. */
        if(mFifoPos < ConvolveUpdateSamples) break;
        mFifoPos = 0;

        /* Move the newest input to the front for the next iteration's history. */
        std::copy(mInput.cbegin()+ConvolveUpdateSamples, mInput.cend(), mInput.begin());

        /* Calculate the frequency domain response and add the relevant
         * frequency bins to the FFT history.
         */
        auto fftiter = std::copy_n(mInput.cbegin(), ConvolveUpdateSamples, mFftBuffer.begin());
        std::fill(fftiter, mFftBuffer.end(), complex_d{});
        forward_fft(mFftBuffer);

        std::copy_n(mFftBuffer.cbegin(), m, &mComplexData[curseg*m]);

        /* The IR filter segments follow the input history in mComplexData. */
        const complex_d *RESTRICT filter{mComplexData.get() + mNumConvolveSegs*m};
        for(size_t c{0};c < chans.size();++c)
        {
            std::fill_n(mFftBuffer.begin(), m, complex_d{});

            /* Convolve each input segment with its IR filter counterpart
             * (aligned in time).
             */
            const complex_d *RESTRICT input{&mComplexData[curseg*m]};
            for(size_t s{curseg};s < mNumConvolveSegs;++s)
            {
                for(size_t i{0};i < m;++i,++input,++filter)
                    mFftBuffer[i] += *input * *filter;
            }
            input = mComplexData.get();
            for(size_t s{0};s < curseg;++s)
            {
                for(size_t i{0};i < m;++i,++input,++filter)
                    mFftBuffer[i] += *input * *filter;
            }

            /* Reconstruct the mirrored/negative frequencies to do a proper
             * inverse FFT.
             */
            for(size_t i{m};i < ConvolveUpdateSize;++i)
                mFftBuffer[i] = std::conj(mFftBuffer[ConvolveUpdateSize-i]);

            /* Apply iFFT to get the 256 (really 255) samples for output. The
             * 128 output samples are combined with the last output's 127
             * second-half samples (and this output's second half is
             * subsequently saved for next time).
             */
            inverse_fft(mFftBuffer);

            /* The iFFT'd response is scaled up by the number of bins, so apply
             * the inverse to normalize the output.
             */
            for(size_t i{0};i < ConvolveUpdateSamples;++i)
                mOutput[c][i] =
                    static_cast<float>(mFftBuffer[i].real() * (1.0/double{ConvolveUpdateSize})) +
                    mOutput[c][ConvolveUpdateSamples+i];
            for(size_t i{0};i < ConvolveUpdateSamples;++i)
                mOutput[c][ConvolveUpdateSamples+i] =
                    static_cast<float>(mFftBuffer[ConvolveUpdateSamples+i].real() *
                    (1.0/double{ConvolveUpdateSize}));
        }

        /* Shift the input history. */
        curseg = curseg ? (curseg-1) : (mNumConvolveSegs-1);
    }
    mCurrentSegment = curseg;

    /* Finally, mix to the output. */
    (this->*mMix)(samplesOut, samplesToDo);
}
/* Factory that allocates ConvolutionState instances for effect slots. */
struct ConvolutionStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    {
        EffectState *state{new ConvolutionState{}};
        return al::intrusive_ptr<EffectState>{state};
    }
};
} // namespace
/* Returns the singleton factory for the convolution effect. */
EffectStateFactory *ConvolutionStateFactory_getFactory()
{
    /* One shared instance, constructed on first use. */
    static ConvolutionStateFactory factory{};
    return &factory;
}

View file

@ -1,184 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2011 by Chris Robinson.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <stdlib.h>
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
#include "alu.h"
#include "filters/defs.h"
/* State for the dedicated-output effect, which routes its input to a single
 * real output channel (LFE or front-center) at the slot's gain.
 */
typedef struct ALdedicatedState {
    DERIVE_FROM_TYPE(ALeffectState);

    /* Per-channel mixing gains; both arrays are handed to MixSamples in
     * _process, which steps from Current toward Target.
     */
    ALfloat CurrentGains[MAX_OUTPUT_CHANNELS];
    ALfloat TargetGains[MAX_OUTPUT_CHANNELS];
} ALdedicatedState;
/* ALeffectState method implementations, bound through the vtable below. */
static ALvoid ALdedicatedState_Destruct(ALdedicatedState *state);
static ALboolean ALdedicatedState_deviceUpdate(ALdedicatedState *state, ALCdevice *device);
static ALvoid ALdedicatedState_update(ALdedicatedState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
static ALvoid ALdedicatedState_process(ALdedicatedState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALdedicatedState)

DEFINE_ALEFFECTSTATE_VTABLE(ALdedicatedState);
/* Initializes the base effect state and installs this type's vtable. */
static void ALdedicatedState_Construct(ALdedicatedState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALdedicatedState, ALeffectState, state);
}
/* Tears down the base state; this type holds no extra resources. */
static ALvoid ALdedicatedState_Destruct(ALdedicatedState *state)
{
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
/* Device reset: start from silence so the mixer fades up to the targets. */
static ALboolean ALdedicatedState_deviceUpdate(ALdedicatedState *state, ALCdevice *UNUSED(device))
{
    ALsizei c = 0;
    while(c < MAX_OUTPUT_CHANNELS)
    {
        state->CurrentGains[c] = 0.0f;
        ++c;
    }
    return AL_TRUE;
}
/* Selects the output target (LFE or front-center) and its gain from the
 * effect slot properties.
 */
static ALvoid ALdedicatedState_update(ALdedicatedState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props)
{
    const ALCdevice *device = context->Device;
    ALfloat gain;
    ALsizei c;

    /* Silence every channel first; only the chosen target gets a gain. */
    for(c = 0;c < MAX_OUTPUT_CHANNELS;c++)
        state->TargetGains[c] = 0.0f;

    gain = slot->Params.Gain * props->Dedicated.Gain;
    if(slot->Params.EffectType == AL_EFFECT_DEDICATED_LOW_FREQUENCY_EFFECT)
    {
        int chanidx = GetChannelIdxByName(&device->RealOut, LFE);
        if(chanidx != -1)
        {
            STATIC_CAST(ALeffectState,state)->OutBuffer = device->RealOut.Buffer;
            STATIC_CAST(ALeffectState,state)->OutChannels = device->RealOut.NumChannels;
            state->TargetGains[chanidx] = gain;
        }
    }
    else if(slot->Params.EffectType == AL_EFFECT_DEDICATED_DIALOGUE)
    {
        /* Dialog goes to the front-center speaker if it exists, otherwise it
         * plays from the front-center location. */
        int chanidx = GetChannelIdxByName(&device->RealOut, FrontCenter);
        if(chanidx != -1)
        {
            STATIC_CAST(ALeffectState,state)->OutBuffer = device->RealOut.Buffer;
            STATIC_CAST(ALeffectState,state)->OutChannels = device->RealOut.NumChannels;
            state->TargetGains[chanidx] = gain;
        }
        else
        {
            ALfloat coeffs[MAX_AMBI_COEFFS];
            CalcAngleCoeffs(0.0f, 0.0f, 0.0f, coeffs);

            STATIC_CAST(ALeffectState,state)->OutBuffer = device->Dry.Buffer;
            STATIC_CAST(ALeffectState,state)->OutChannels = device->Dry.NumChannels;
            ComputeDryPanGains(&device->Dry, coeffs, gain, state->TargetGains);
        }
    }
}
/* Mixes the first input channel to the targeted output channels; the gain
 * stepping from CurrentGains to TargetGains is handled inside MixSamples.
 */
static ALvoid ALdedicatedState_process(ALdedicatedState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    MixSamples(SamplesIn[0], NumChannels, SamplesOut, state->CurrentGains,
               state->TargetGains, SamplesToDo, 0, SamplesToDo);
}
/* Factory type for creating ALdedicatedState instances. */
typedef struct DedicatedStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} DedicatedStateFactory;
/* Allocates and default-constructs a dedicated effect state. Returns NULL on
 * allocation failure.
 */
ALeffectState *DedicatedStateFactory_create(DedicatedStateFactory *UNUSED(factory))
{
    ALdedicatedState *obj;
    NEW_OBJ0(obj, ALdedicatedState)();
    if(!obj) return NULL;
    return STATIC_CAST(ALeffectState, obj);
}
DEFINE_EFFECTSTATEFACTORY_VTABLE(DedicatedStateFactory);

/* Returns the shared, statically-allocated factory instance. */
EffectStateFactory *DedicatedStateFactory_getFactory(void)
{
    static DedicatedStateFactory DedicatedFactory = { { GET_VTABLE2(DedicatedStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &DedicatedFactory);
}
/* The dedicated effect exposes no integer properties; always an error. */
void ALdedicated_setParami(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid dedicated integer property 0x%04x", param); }
void ALdedicated_setParamiv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid dedicated integer-vector property 0x%04x", param); }

/* AL_DEDICATED_GAIN is the only float property; it must be finite and
 * non-negative.
 */
void ALdedicated_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_DEDICATED_GAIN:
            if(!(val >= 0.0f && isfinite(val)))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Dedicated gain out of range");
            props->Dedicated.Gain = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid dedicated float property 0x%04x", param);
    }
}
/* Vector variant forwards the first element to the scalar setter. */
void ALdedicated_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALdedicated_setParamf(effect, context, param, vals[0]); }
/* No integer properties exist to read; always an error. */
void ALdedicated_getParami(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid dedicated integer property 0x%04x", param); }
void ALdedicated_getParamiv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid dedicated integer-vector property 0x%04x", param); }

/* Reads back the single float property, AL_DEDICATED_GAIN. */
void ALdedicated_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_DEDICATED_GAIN:
            *val = props->Dedicated.Gain;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid dedicated float property 0x%04x", param);
    }
}
/* Vector variant forwards to the scalar getter. */
void ALdedicated_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALdedicated_getParamf(effect, context, param, vals); }

DEFINE_ALEFFECT_VTABLE(ALdedicated);

View file

@ -0,0 +1,110 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2011 by Chris Robinson.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cstdlib>
#include <cmath>
#include <algorithm>
#include "alcmain.h"
#include "alcontext.h"
#include "alu.h"
#include "effectslot.h"
namespace {
/* State for the dedicated-output effect: routes its input to a single real
 * output channel (LFE or front-center) at the slot's gain.
 */
struct DedicatedState final : public EffectState {
    /* Per-channel mixing gains; MixSamples in process() steps from current
     * toward target.
     */
    float mCurrentGains[MAX_OUTPUT_CHANNELS];
    float mTargetGains[MAX_OUTPUT_CHANNELS];


    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(DedicatedState)
};
/* Device reset: start from silence so the mixer fades up to the targets. */
void DedicatedState::deviceUpdate(const ALCdevice*, const Buffer&)
{
    for(float &gain : mCurrentGains)
        gain = 0.0f;
}
void DedicatedState::update(const ALCcontext*, const EffectSlot *slot,
const EffectProps *props, const EffectTarget target)
{
std::fill(std::begin(mTargetGains), std::end(mTargetGains), 0.0f);
const float Gain{slot->Gain * props->Dedicated.Gain};
if(slot->EffectType == EffectSlotType::DedicatedLFE)
{
const uint idx{!target.RealOut ? INVALID_CHANNEL_INDEX :
GetChannelIdxByName(*target.RealOut, LFE)};
if(idx != INVALID_CHANNEL_INDEX)
{
mOutTarget = target.RealOut->Buffer;
mTargetGains[idx] = Gain;
}
}
else if(slot->EffectType == EffectSlotType::DedicatedDialog)
{
/* Dialog goes to the front-center speaker if it exists, otherwise it
* plays from the front-center location. */
const uint idx{!target.RealOut ? INVALID_CHANNEL_INDEX :
GetChannelIdxByName(*target.RealOut, FrontCenter)};
if(idx != INVALID_CHANNEL_INDEX)
{
mOutTarget = target.RealOut->Buffer;
mTargetGains[idx] = Gain;
}
else
{
const auto coeffs = CalcDirectionCoeffs({0.0f, 0.0f, -1.0f}, 0.0f);
mOutTarget = target.Main->Buffer;
ComputePanGains(target.Main, coeffs.data(), Gain, mTargetGains);
}
}
}
/* Mixes the (mono) input to the targeted output; gain stepping from
 * mCurrentGains to mTargetGains is handled inside MixSamples.
 */
void DedicatedState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    MixSamples({samplesIn[0].data(), samplesToDo}, samplesOut, mCurrentGains, mTargetGains,
        samplesToDo, 0);
}
/* Factory that allocates DedicatedState instances for effect slots. */
struct DedicatedStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    {
        EffectState *state{new DedicatedState{}};
        return al::intrusive_ptr<EffectState>{state};
    }
};
} // namespace
/* Returns the singleton factory for the dedicated effect. */
EffectStateFactory *DedicatedStateFactory_getFactory()
{
    /* One shared instance, constructed on first use. */
    static DedicatedStateFactory factory{};
    return &factory;
}

View file

@ -1,287 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Mike Gorchak
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <math.h>
#include <stdlib.h>
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
#include "alu.h"
#include "filters/defs.h"
/* State for the distortion effect: a 4x-oversampled waveshaper with a
 * lowpass before and a bandpass EQ after the shaping stage.
 */
typedef struct ALdistortionState {
    DERIVE_FROM_TYPE(ALeffectState);

    /* Effect gains for each channel */
    ALfloat Gain[MAX_OUTPUT_CHANNELS];

    /* Effect parameters */
    BiquadFilter lowpass;   /* applied to the oversampled input before shaping */
    BiquadFilter bandpass;  /* EQ applied after shaping */
    ALfloat attenuation;
    ALfloat edge_coeff;     /* waveshaper drive derived from the Edge property */

    ALfloat Buffer[2][BUFFERSIZE]; /* scratch for the oversampled signal */
} ALdistortionState;
/* ALeffectState method implementations, bound through the vtable below. */
static ALvoid ALdistortionState_Destruct(ALdistortionState *state);
static ALboolean ALdistortionState_deviceUpdate(ALdistortionState *state, ALCdevice *device);
static ALvoid ALdistortionState_update(ALdistortionState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
static ALvoid ALdistortionState_process(ALdistortionState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALdistortionState)

DEFINE_ALEFFECTSTATE_VTABLE(ALdistortionState);
/* Initializes the base effect state and installs this type's vtable. */
static void ALdistortionState_Construct(ALdistortionState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALdistortionState, ALeffectState, state);
}
/* Tears down the base state; this type holds no extra resources. */
static ALvoid ALdistortionState_Destruct(ALdistortionState *state)
{
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
/* Device reset: clear the filter histories so a reused state starts clean. */
static ALboolean ALdistortionState_deviceUpdate(ALdistortionState *state, ALCdevice *UNUSED(device))
{
    BiquadFilter_clear(&state->lowpass);
    BiquadFilter_clear(&state->bandpass);
    return AL_TRUE;
}
/* Derives the waveshaper coefficient, filter settings, and panning gains
 * from the slot's distortion properties.
 */
static ALvoid ALdistortionState_update(ALdistortionState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props)
{
    const ALCdevice *device = context->Device;
    ALfloat frequency = (ALfloat)device->Frequency;
    ALfloat coeffs[MAX_AMBI_COEFFS];
    ALfloat bandwidth;
    ALfloat cutoff;
    ALfloat edge;

    /* Store waveshaper edge settings. Clamped below 1 so the drive
     * coefficient stays finite.
     */
    edge = sinf(props->Distortion.Edge * (F_PI_2));
    edge = minf(edge, 0.99f);
    state->edge_coeff = 2.0f * edge / (1.0f-edge);

    cutoff = props->Distortion.LowpassCutoff;
    /* Bandwidth value is constant in octaves (note the cutoff terms cancel,
     * giving ~0.75 octaves regardless of the property value).
     */
    bandwidth = (cutoff / 2.0f) / (cutoff * 0.67f);
    /* Multiply sampling frequency by the amount of oversampling done during
     * processing.
     */
    BiquadFilter_setParams(&state->lowpass, BiquadType_LowPass, 1.0f,
        cutoff / (frequency*4.0f), calc_rcpQ_from_bandwidth(cutoff / (frequency*4.0f), bandwidth)
    );

    cutoff = props->Distortion.EQCenter;
    /* Convert bandwidth in Hz to octaves. */
    bandwidth = props->Distortion.EQBandwidth / (cutoff * 0.67f);
    BiquadFilter_setParams(&state->bandpass, BiquadType_BandPass, 1.0f,
        cutoff / (frequency*4.0f), calc_rcpQ_from_bandwidth(cutoff / (frequency*4.0f), bandwidth)
    );

    /* Pan the result to the front (angle 0, elevation 0). */
    CalcAngleCoeffs(0.0f, 0.0f, 0.0f, coeffs);
    ComputeDryPanGains(&device->Dry, coeffs, slot->Params.Gain * props->Distortion.Gain,
                       state->Gain);
}
/* Processing pipeline: 4x zero-stuff upsample -> lowpass -> three-stage
 * waveshaper -> bandpass -> decimate by 4 and mix to output.
 */
static ALvoid ALdistortionState_process(ALdistortionState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    ALfloat (*restrict buffer)[BUFFERSIZE] = state->Buffer;
    const ALfloat fc = state->edge_coeff;
    ALsizei base;
    ALsizei i, k;

    for(base = 0;base < SamplesToDo;)
    {
        /* Perform 4x oversampling to avoid aliasing. Oversampling greatly
         * improves distortion quality and allows to implement lowpass and
         * bandpass filters using high frequencies, at which classic IIR
         * filters became unstable.
         */
        /* NOTE: todo counts oversampled samples until the >>2 below. */
        ALsizei todo = mini(BUFFERSIZE, (SamplesToDo-base) * 4);

        /* Fill oversample buffer using zero stuffing. Multiply the sample by
         * the amount of oversampling to maintain the signal's power.
         */
        for(i = 0;i < todo;i++)
            buffer[0][i] = !(i&3) ? SamplesIn[0][(i>>2)+base] * 4.0f : 0.0f;

        /* First step, do lowpass filtering of original signal. Additionally
         * perform buffer interpolation and lowpass cutoff for oversampling
         * (which is fortunately first step of distortion). So combine three
         * operations into the one.
         */
        BiquadFilter_process(&state->lowpass, buffer[1], buffer[0], todo);

        /* Second step, do distortion using waveshaper function to emulate
         * signal processing during tube overdriving. Three steps of
         * waveshaping are intended to modify waveform without boost/clipping/
         * attenuation process.
         */
        for(i = 0;i < todo;i++)
        {
            ALfloat smp = buffer[1][i];

            smp = (1.0f + fc) * smp/(1.0f + fc*fabsf(smp));
            smp = (1.0f + fc) * smp/(1.0f + fc*fabsf(smp)) * -1.0f;
            smp = (1.0f + fc) * smp/(1.0f + fc*fabsf(smp));

            buffer[0][i] = smp;
        }

        /* Third step, do bandpass filtering of distorted signal. */
        BiquadFilter_process(&state->bandpass, buffer[1], buffer[0], todo);

        /* Back to the original rate for mixing. */
        todo >>= 2;
        for(k = 0;k < NumChannels;k++)
        {
            /* Fourth step, final, do attenuation and perform decimation,
             * storing only one sample out of four.
             */
            ALfloat gain = state->Gain[k];
            if(!(fabsf(gain) > GAIN_SILENCE_THRESHOLD))
                continue;

            for(i = 0;i < todo;i++)
                SamplesOut[k][base+i] += gain * buffer[1][i*4];
        }

        base += todo;
    }
}
/* Factory type for creating ALdistortionState instances. */
typedef struct DistortionStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} DistortionStateFactory;
/* Allocates and default-constructs a distortion effect state; NULL on
 * allocation failure.
 */
static ALeffectState *DistortionStateFactory_create(DistortionStateFactory *UNUSED(factory))
{
    ALdistortionState *state;

    NEW_OBJ0(state, ALdistortionState)();
    if(!state) return NULL;

    return STATIC_CAST(ALeffectState, state);
}
DEFINE_EFFECTSTATEFACTORY_VTABLE(DistortionStateFactory);

/* Returns the shared, statically-allocated factory instance. */
EffectStateFactory *DistortionStateFactory_getFactory(void)
{
    static DistortionStateFactory DistortionFactory = { { GET_VTABLE2(DistortionStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &DistortionFactory);
}
/* The distortion effect exposes no integer properties; always an error. */
void ALdistortion_setParami(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid distortion integer property 0x%04x", param); }
void ALdistortion_setParamiv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid distortion integer-vector property 0x%04x", param); }

/* Validates each float property against its AL_DISTORTION_MIN/MAX range
 * before storing it.
 */
void ALdistortion_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_DISTORTION_EDGE:
            if(!(val >= AL_DISTORTION_MIN_EDGE && val <= AL_DISTORTION_MAX_EDGE))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Distortion edge out of range");
            props->Distortion.Edge = val;
            break;

        case AL_DISTORTION_GAIN:
            if(!(val >= AL_DISTORTION_MIN_GAIN && val <= AL_DISTORTION_MAX_GAIN))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Distortion gain out of range");
            props->Distortion.Gain = val;
            break;

        case AL_DISTORTION_LOWPASS_CUTOFF:
            if(!(val >= AL_DISTORTION_MIN_LOWPASS_CUTOFF && val <= AL_DISTORTION_MAX_LOWPASS_CUTOFF))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Distortion low-pass cutoff out of range");
            props->Distortion.LowpassCutoff = val;
            break;

        case AL_DISTORTION_EQCENTER:
            if(!(val >= AL_DISTORTION_MIN_EQCENTER && val <= AL_DISTORTION_MAX_EQCENTER))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Distortion EQ center out of range");
            props->Distortion.EQCenter = val;
            break;

        case AL_DISTORTION_EQBANDWIDTH:
            if(!(val >= AL_DISTORTION_MIN_EQBANDWIDTH && val <= AL_DISTORTION_MAX_EQBANDWIDTH))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Distortion EQ bandwidth out of range");
            props->Distortion.EQBandwidth = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid distortion float property 0x%04x",
                       param);
    }
}
/* Vector variant forwards the first element to the scalar setter. */
void ALdistortion_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALdistortion_setParamf(effect, context, param, vals[0]); }
/* No integer properties exist to read; always an error. */
void ALdistortion_getParami(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid distortion integer property 0x%04x", param); }
void ALdistortion_getParamiv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid distortion integer-vector property 0x%04x", param); }

/* Reads back one of the five float distortion properties. */
void ALdistortion_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_DISTORTION_EDGE:
            *val = props->Distortion.Edge;
            break;

        case AL_DISTORTION_GAIN:
            *val = props->Distortion.Gain;
            break;

        case AL_DISTORTION_LOWPASS_CUTOFF:
            *val = props->Distortion.LowpassCutoff;
            break;

        case AL_DISTORTION_EQCENTER:
            *val = props->Distortion.EQCenter;
            break;

        case AL_DISTORTION_EQBANDWIDTH:
            *val = props->Distortion.EQBandwidth;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid distortion float property 0x%04x",
                       param);
    }
}
/* Vector variant forwards to the scalar getter. */
void ALdistortion_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALdistortion_getParamf(effect, context, param, vals); }

DEFINE_ALEFFECT_VTABLE(ALdistortion);

View file

@ -0,0 +1,167 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Mike Gorchak
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <algorithm>
#include <cmath>
#include <cstdlib>
#include "alcmain.h"
#include "alcontext.h"
#include "core/filters/biquad.h"
#include "effectslot.h"
namespace {
/* State for the distortion effect: a 4x-oversampled waveshaper with a
 * lowpass before and a bandpass EQ after the shaping stage.
 */
struct DistortionState final : public EffectState {
    /* Effect gains for each channel */
    float mGain[MAX_OUTPUT_CHANNELS]{};

    /* Effect parameters */
    BiquadFilter mLowpass;   /* applied to the oversampled input before shaping */
    BiquadFilter mBandpass;  /* EQ applied after shaping */
    float mAttenuation{};
    float mEdgeCoeff{};      /* waveshaper drive derived from the Edge property */

    float mBuffer[2][BufferLineSize]{}; /* scratch for the oversampled signal */


    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(DistortionState)
};
/* Device reset: clear the filter histories so a reused state starts clean. */
void DistortionState::deviceUpdate(const ALCdevice*, const Buffer&)
{
    mBandpass.clear();
    mLowpass.clear();
}
/* Derives the waveshaper coefficient, filter parameters, and panning gains
 * from the slot's distortion properties.
 */
void DistortionState::update(const ALCcontext *context, const EffectSlot *slot,
    const EffectProps *props, const EffectTarget target)
{
    const ALCdevice *device{context->mDevice.get()};
    const auto frequency = static_cast<float>(device->Frequency);

    /* Store waveshaper edge settings. */
    const float edge{minf(std::sin(al::MathDefs<float>::Pi()*0.5f * props->Distortion.Edge),
        0.99f)};
    mEdgeCoeff = 2.0f * edge / (1.0f-edge);

    /* Bandwidth value is constant in octaves. The filters run on the 4x
     * oversampled signal, so normalized frequencies are divided by 4.
     */
    float cutoff{props->Distortion.LowpassCutoff};
    float bandwidth{(cutoff / 2.0f) / (cutoff * 0.67f)};
    mLowpass.setParamsFromBandwidth(BiquadType::LowPass, cutoff/frequency/4.0f, 1.0f, bandwidth);

    /* Convert bandwidth in Hz to octaves. */
    cutoff = props->Distortion.EQCenter;
    bandwidth = props->Distortion.EQBandwidth / (cutoff * 0.67f);
    mBandpass.setParamsFromBandwidth(BiquadType::BandPass, cutoff/frequency/4.0f, 1.0f, bandwidth);

    /* Pan the result straight ahead. */
    const auto coeffs = CalcDirectionCoeffs({0.0f, 0.0f, -1.0f}, 0.0f);
    mOutTarget = target.Main->Buffer;
    ComputePanGains(target.Main, coeffs.data(), slot->Gain*props->Distortion.Gain, mGain);
}
/* Processing pipeline: 4x zero-stuff upsample -> lowpass -> three-stage
 * waveshaper -> bandpass -> decimate by 4 and mix to output.
 */
void DistortionState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    const float fc{mEdgeCoeff};
    for(size_t base{0u};base < samplesToDo;)
    {
        /* Perform 4x oversampling to avoid aliasing. Oversampling greatly
         * improves distortion quality and allows to implement lowpass and
         * bandpass filters using high frequencies, at which classic IIR
         * filters became unstable.
         */
        /* NOTE: todo counts oversampled samples until the >>2 below. */
        size_t todo{minz(BufferLineSize, (samplesToDo-base) * 4)};

        /* Fill oversample buffer using zero stuffing. Multiply the sample by
         * the amount of oversampling to maintain the signal's power.
         */
        for(size_t i{0u};i < todo;i++)
            mBuffer[0][i] = !(i&3) ? samplesIn[0][(i>>2)+base] * 4.0f : 0.0f;

        /* First step, do lowpass filtering of original signal. Additionally
         * perform buffer interpolation and lowpass cutoff for oversampling
         * (which is fortunately first step of distortion). So combine three
         * operations into the one.
         */
        mLowpass.process({mBuffer[0], todo}, mBuffer[1]);

        /* Second step, do distortion using waveshaper function to emulate
         * signal processing during tube overdriving. Three steps of
         * waveshaping are intended to modify waveform without boost/clipping/
         * attenuation process.
         */
        auto proc_sample = [fc](float smp) -> float
        {
            smp = (1.0f + fc) * smp/(1.0f + fc*std::abs(smp));
            smp = (1.0f + fc) * smp/(1.0f + fc*std::abs(smp)) * -1.0f;
            smp = (1.0f + fc) * smp/(1.0f + fc*std::abs(smp));
            return smp;
        };
        std::transform(std::begin(mBuffer[1]), std::begin(mBuffer[1])+todo, std::begin(mBuffer[0]),
            proc_sample);

        /* Third step, do bandpass filtering of distorted signal. */
        mBandpass.process({mBuffer[0], todo}, mBuffer[1]);

        /* Back to the original rate for mixing. */
        todo >>= 2;
        const float *outgains{mGain};
        for(FloatBufferLine &output : samplesOut)
        {
            /* Fourth step, final, do attenuation and perform decimation,
             * storing only one sample out of four.
             */
            const float gain{*(outgains++)};
            if(!(std::fabs(gain) > GainSilenceThreshold))
                continue;

            for(size_t i{0u};i < todo;i++)
                output[base+i] += gain * mBuffer[1][i*4];
        }

        base += todo;
    }
}
/* Factory that allocates DistortionState instances for effect slots. */
struct DistortionStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    {
        EffectState *state{new DistortionState{}};
        return al::intrusive_ptr<EffectState>{state};
    }
};
} // namespace
/* Returns the singleton factory for the distortion effect. */
EffectStateFactory *DistortionStateFactory_getFactory()
{
    /* One shared instance, constructed on first use. */
    static DistortionStateFactory factory{};
    return &factory;
}

View file

@ -1,310 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2009 by Chris Robinson.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <math.h>
#include <stdlib.h>
#include "alMain.h"
#include "alFilter.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
#include "alu.h"
#include "filters/defs.h"
/* State for the two-tap echo effect with a damped feedback path. */
typedef struct ALechoState {
    DERIVE_FROM_TYPE(ALeffectState);

    ALfloat *SampleBuffer; /* delay line; power-of-two sized (see deviceUpdate) */
    ALsizei BufferLength;

    // The echo is two tap. The delay is the number of samples from before the
    // current offset
    struct {
        ALsizei delay;
    } Tap[2];
    ALsizei Offset; /* current write position within the delay line */

    /* The panning gains for the two taps */
    struct {
        ALfloat Current[MAX_OUTPUT_CHANNELS];
        ALfloat Target[MAX_OUTPUT_CHANNELS];
    } Gains[2];

    ALfloat FeedGain;    /* feedback amount fed back from the second tap */
    BiquadFilter Filter; /* high-shelf damping filter on the feedback path */
} ALechoState;
/* ALeffectState method implementations, bound through the vtable below. */
static ALvoid ALechoState_Destruct(ALechoState *state);
static ALboolean ALechoState_deviceUpdate(ALechoState *state, ALCdevice *Device);
static ALvoid ALechoState_update(ALechoState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
static ALvoid ALechoState_process(ALechoState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALechoState)

DEFINE_ALEFFECTSTATE_VTABLE(ALechoState);
/* Initializes the base state, installs the vtable, and zeroes the delay
 * line bookkeeping (the buffer itself is allocated in deviceUpdate).
 */
static void ALechoState_Construct(ALechoState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALechoState, ALeffectState, state);

    state->BufferLength = 0;
    state->SampleBuffer = NULL;

    state->Tap[0].delay = 0;
    state->Tap[1].delay = 0;
    state->Offset = 0;

    BiquadFilter_clear(&state->Filter);
}
/* Frees the delay line, then tears down the base state. */
static ALvoid ALechoState_Destruct(ALechoState *state)
{
    al_free(state->SampleBuffer);
    state->SampleBuffer = NULL;
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
/* (Re)allocates the delay line for the device's sample rate and clears all
 * runtime state. Returns AL_FALSE on allocation failure.
 */
static ALboolean ALechoState_deviceUpdate(ALechoState *state, ALCdevice *Device)
{
    ALsizei maxlen;

    // Use the next power of 2 for the buffer length, so the tap offsets can be
    // wrapped using a mask instead of a modulo
    maxlen = float2int(AL_ECHO_MAX_DELAY*Device->Frequency + 0.5f) +
             float2int(AL_ECHO_MAX_LRDELAY*Device->Frequency + 0.5f);
    maxlen = NextPowerOf2(maxlen);
    if(maxlen <= 0) return AL_FALSE;

    if(maxlen != state->BufferLength)
    {
        /* Allocate the new buffer before releasing the old one, so failure
         * leaves the previous state intact.
         */
        void *temp = al_calloc(16, maxlen * sizeof(ALfloat));
        if(!temp) return AL_FALSE;

        al_free(state->SampleBuffer);
        state->SampleBuffer = temp;
        state->BufferLength = maxlen;
    }

    /* Clear the delay line and gain history for a silent start. */
    memset(state->SampleBuffer, 0, state->BufferLength*sizeof(ALfloat));
    memset(state->Gains, 0, sizeof(state->Gains));

    return AL_TRUE;
}
/* Computes tap delays, the feedback damping filter, and per-tap panning
 * from the echo properties.
 */
static ALvoid ALechoState_update(ALechoState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props)
{
    const ALCdevice *device = context->Device;
    ALuint frequency = device->Frequency;
    ALfloat coeffs[MAX_AMBI_COEFFS];
    ALfloat gainhf, lrpan, spread;

    /* Convert delay times to sample counts; the first tap needs at least one
     * sample of delay, and the second tap is relative to the first.
     */
    state->Tap[0].delay = maxi(float2int(props->Echo.Delay*frequency + 0.5f), 1);
    state->Tap[1].delay = float2int(props->Echo.LRDelay*frequency + 0.5f);
    state->Tap[1].delay += state->Tap[0].delay;

    spread = props->Echo.Spread;
    if(spread < 0.0f) lrpan = -1.0f;
    else lrpan = 1.0f;
    /* Convert echo spread (where 0 = omni, +/-1 = directional) to coverage
     * spread (where 0 = point, tau = omni).
     */
    spread = asinf(1.0f - fabsf(spread))*4.0f;

    state->FeedGain = props->Echo.Feedback;

    gainhf = maxf(1.0f - props->Echo.Damping, 0.0625f); /* Limit -24dB */
    BiquadFilter_setParams(&state->Filter, BiquadType_HighShelf,
        gainhf, LOWPASSFREQREF/frequency, calc_rcpQ_from_slope(gainhf, 1.0f)
    );

    /* First tap panning */
    CalcAngleCoeffs(-F_PI_2*lrpan, 0.0f, spread, coeffs);
    ComputeDryPanGains(&device->Dry, coeffs, slot->Params.Gain, state->Gains[0].Target);

    /* Second tap panning */
    CalcAngleCoeffs( F_PI_2*lrpan, 0.0f, spread, coeffs);
    ComputeDryPanGains(&device->Dry, coeffs, slot->Params.Gain, state->Gains[1].Target);
}
/* Feeds the input into the masked ring-buffer delay line, reads the two
 * taps, applies the damping filter inline (using the biquad's raw
 * coefficients and z1/z2 history) to the second tap before feeding it back,
 * then mixes both taps to the output in blocks of up to 128 samples.
 */
static ALvoid ALechoState_process(ALechoState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    const ALsizei mask = state->BufferLength-1;
    const ALsizei tap1 = state->Tap[0].delay;
    const ALsizei tap2 = state->Tap[1].delay;
    ALfloat *restrict delaybuf = state->SampleBuffer;
    ALsizei offset = state->Offset;
    ALfloat z1, z2, in, out;
    ALsizei base;
    ALsizei c, i;

    /* Keep the filter history in locals across the loop; written back at
     * the end.
     */
    z1 = state->Filter.z1;
    z2 = state->Filter.z2;
    for(base = 0;base < SamplesToDo;)
    {
        alignas(16) ALfloat temps[2][128];
        ALsizei td = mini(128, SamplesToDo-base);

        for(i = 0;i < td;i++)
        {
            /* Feed the delay buffer's input first. */
            delaybuf[offset&mask] = SamplesIn[0][i+base];

            /* First tap */
            temps[0][i] = delaybuf[(offset-tap1) & mask];
            /* Second tap */
            temps[1][i] = delaybuf[(offset-tap2) & mask];

            /* Apply damping to the second tap, then add it to the buffer with
             * feedback attenuation.
             */
            in = temps[1][i];
            out = in*state->Filter.b0 + z1;
            z1 = in*state->Filter.b1 - out*state->Filter.a1 + z2;
            z2 = in*state->Filter.b2 - out*state->Filter.a2;

            delaybuf[offset&mask] += out * state->FeedGain;
            offset++;
        }

        /* Mix both taps to the output with their own fading gains. */
        for(c = 0;c < 2;c++)
            MixSamples(temps[c], NumChannels, SamplesOut, state->Gains[c].Current,
                       state->Gains[c].Target, SamplesToDo-base, base, td);

        base += td;
    }
    state->Filter.z1 = z1;
    state->Filter.z2 = z2;
    state->Offset = offset;
}
/* Factory type for creating echo effect state objects. */
typedef struct EchoStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} EchoStateFactory;
/* Allocates and constructs a new echo effect state.
 * Returns NULL on allocation failure.
 */
ALeffectState *EchoStateFactory_create(EchoStateFactory *UNUSED(factory))
{
    ALechoState *state;
    NEW_OBJ0(state, ALechoState)();
    if(!state) return NULL;
    return STATIC_CAST(ALeffectState, state);
}
DEFINE_EFFECTSTATEFACTORY_VTABLE(EchoStateFactory);

/* Returns the singleton factory used to create echo effect states. */
EffectStateFactory *EchoStateFactory_getFactory(void)
{
    static EchoStateFactory EchoFactory = { { GET_VTABLE2(EchoStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &EchoFactory);
}
/* The echo effect has no integer properties; any request is an error. */
void ALecho_setParami(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid echo integer property 0x%04x", param); }
void ALecho_setParamiv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid echo integer-vector property 0x%04x", param); }
/* Sets a float echo property after validating it against the AL_ECHO_MIN/MAX
 * range. Out-of-range values raise AL_INVALID_VALUE (leaving the property
 * unchanged); unknown properties raise AL_INVALID_ENUM.
 */
void ALecho_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_ECHO_DELAY:
            if(!(val >= AL_ECHO_MIN_DELAY && val <= AL_ECHO_MAX_DELAY))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Echo delay out of range");
            props->Echo.Delay = val;
            break;

        case AL_ECHO_LRDELAY:
            if(!(val >= AL_ECHO_MIN_LRDELAY && val <= AL_ECHO_MAX_LRDELAY))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Echo LR delay out of range");
            props->Echo.LRDelay = val;
            break;

        case AL_ECHO_DAMPING:
            if(!(val >= AL_ECHO_MIN_DAMPING && val <= AL_ECHO_MAX_DAMPING))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Echo damping out of range");
            props->Echo.Damping = val;
            break;

        case AL_ECHO_FEEDBACK:
            if(!(val >= AL_ECHO_MIN_FEEDBACK && val <= AL_ECHO_MAX_FEEDBACK))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Echo feedback out of range");
            props->Echo.Feedback = val;
            break;

        case AL_ECHO_SPREAD:
            if(!(val >= AL_ECHO_MIN_SPREAD && val <= AL_ECHO_MAX_SPREAD))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Echo spread out of range");
            props->Echo.Spread = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid echo float property 0x%04x", param);
    }
}
/* All echo float properties are single-valued, so the vector setter forwards
 * the first element to the scalar setter.
 */
void ALecho_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALecho_setParamf(effect, context, param, vals[0]); }

/* The echo effect has no integer properties; any request is an error. */
void ALecho_getParami(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid echo integer property 0x%04x", param); }
void ALecho_getParamiv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid echo integer-vector property 0x%04x", param); }
/* Retrieves the current value of a float echo property into *val. Unknown
 * properties raise AL_INVALID_ENUM and leave *val untouched.
 */
void ALecho_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    if(param == AL_ECHO_DELAY)
        *val = props->Echo.Delay;
    else if(param == AL_ECHO_LRDELAY)
        *val = props->Echo.LRDelay;
    else if(param == AL_ECHO_DAMPING)
        *val = props->Echo.Damping;
    else if(param == AL_ECHO_FEEDBACK)
        *val = props->Echo.Feedback;
    else if(param == AL_ECHO_SPREAD)
        *val = props->Echo.Spread;
    else
        alSetError(context, AL_INVALID_ENUM, "Invalid echo float property 0x%04x", param);
}
/* Float-vector properties are single-valued; forward to the scalar getter. */
void ALecho_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALecho_getParamf(effect, context, param, vals); }

DEFINE_ALEFFECT_VTABLE(ALecho);

View file

@ -0,0 +1,168 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2009 by Chris Robinson.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cmath>
#include <cstdlib>
#include <algorithm>
#include "alcmain.h"
#include "alcontext.h"
#include "core/filters/biquad.h"
#include "effectslot.h"
#include "vector.h"
namespace {
/* Reference frequency (Hz) for the feedback damping high-shelf filter. */
constexpr float LowpassFreqRef{5000.0f};
/* Per-slot state for the echo effect: a power-of-2-sized circular delay line
 * with two read taps, a damped feedback path, and per-tap panning gains.
 */
struct EchoState final : public EffectState {
    /* Circular delay line; sized to a power of 2 so tap offsets can wrap with
     * a bit mask.
     */
    al::vector<float,16> mSampleBuffer;

    // The echo is two tap. The delay is the number of samples from before the
    // current offset
    struct {
        size_t delay{0u};
    } mTap[2];
    size_t mOffset{0u};

    /* The panning gains for the two taps */
    struct {
        float Current[MAX_OUTPUT_CHANNELS]{};
        float Target[MAX_OUTPUT_CHANNELS]{};
    } mGains[2];

    /* High-shelf filter applied to the feedback path for damping. */
    BiquadFilter mFilter;
    float mFeedGain{0.0f};

    /* Scratch space holding one chunk of output for each tap. */
    alignas(16) float mTempBuffer[2][BufferLineSize];

    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(EchoState)
};
/* (Re)sizes the delay line for the device's sample rate, then clears all
 * buffered samples and the per-tap panning gains.
 */
void EchoState::deviceUpdate(const ALCdevice *Device, const Buffer&)
{
    const auto frequency = static_cast<float>(Device->Frequency);

    // Use the next power of 2 for the buffer length, so the tap offsets can be
    // wrapped using a mask instead of a modulo
    const uint maxlen{NextPowerOf2(float2uint(EchoMaxDelay*frequency + 0.5f) +
        float2uint(EchoMaxLRDelay*frequency + 0.5f))};
    if(maxlen != mSampleBuffer.size())
        /* Swap with a fresh vector so excess capacity is released too. */
        al::vector<float,16>(maxlen).swap(mSampleBuffer);
    std::fill(mSampleBuffer.begin(), mSampleBuffer.end(), 0.0f);

    for(auto &e : mGains)
    {
        std::fill(std::begin(e.Current), std::end(e.Current), 0.0f);
        std::fill(std::begin(e.Target), std::end(e.Target), 0.0f);
    }
}
/* Updates tap delays (in samples), the feedback damping filter and gain, and
 * the per-tap panning gains from the effect slot's current properties.
 */
void EchoState::update(const ALCcontext *context, const EffectSlot *slot,
    const EffectProps *props, const EffectTarget target)
{
    const ALCdevice *device{context->mDevice.get()};
    const auto frequency = static_cast<float>(device->Frequency);

    /* Delay times (seconds) to sample counts; the first tap is at least one
     * sample, and the second tap is offset from the first.
     */
    mTap[0].delay = maxu(float2uint(props->Echo.Delay*frequency + 0.5f), 1);
    mTap[1].delay = float2uint(props->Echo.LRDelay*frequency + 0.5f) + mTap[0].delay;

    const float gainhf{maxf(1.0f - props->Echo.Damping, 0.0625f)}; /* Limit -24dB */
    mFilter.setParamsFromSlope(BiquadType::HighShelf, LowpassFreqRef/frequency, gainhf, 1.0f);

    mFeedGain = props->Echo.Feedback;

    /* Convert echo spread (where 0 = center, +/-1 = sides) to angle. */
    const float angle{std::asin(props->Echo.Spread)};

    /* The two taps are panned symmetrically about the front. */
    const auto coeffs0 = CalcAngleCoeffs(-angle, 0.0f, 0.0f);
    const auto coeffs1 = CalcAngleCoeffs( angle, 0.0f, 0.0f);

    mOutTarget = target.Main->Buffer;
    ComputePanGains(target.Main, coeffs0.data(), slot->Gain, mGains[0].Target);
    ComputePanGains(target.Main, coeffs1.data(), slot->Gain, mGains[1].Target);
}
/* Feeds the first input channel through the delay line, reading the two taps
 * into temp buffers, applying damped feedback from the second tap, and mixing
 * both taps to the output with their panning gains.
 */
void EchoState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    const size_t mask{mSampleBuffer.size()-1}; /* buffer size is a power of 2 */
    float *RESTRICT delaybuf{mSampleBuffer.data()};
    size_t offset{mOffset};
    size_t tap1{offset - mTap[0].delay};
    size_t tap2{offset - mTap[1].delay};
    float z1, z2;

    ASSUME(samplesToDo > 0);

    /* Work on a local copy of the filter and its history within the loop. */
    const BiquadFilter filter{mFilter};
    std::tie(z1, z2) = mFilter.getComponents();
    for(size_t i{0u};i < samplesToDo;)
    {
        offset &= mask;
        tap1 &= mask;
        tap2 &= mask;

        /* Run until whichever index wraps first, or the remaining sample
         * count, so no masking is needed inside the inner loop.
         */
        size_t td{minz(mask+1 - maxz(offset, maxz(tap1, tap2)), samplesToDo-i)};
        do {
            /* Feed the delay buffer's input first. */
            delaybuf[offset] = samplesIn[0][i];

            /* Get delayed output from the first and second taps. Use the
             * second tap for feedback.
             */
            mTempBuffer[0][i] = delaybuf[tap1++];
            mTempBuffer[1][i] = delaybuf[tap2++];
            const float feedb{mTempBuffer[1][i++]};

            /* Add feedback to the delay buffer with damping and attenuation. */
            delaybuf[offset++] += filter.processOne(feedb, z1, z2) * mFeedGain;
        } while(--td);
    }
    mFilter.setComponents(z1, z2);
    mOffset = offset;

    /* Mix both taps to the output, fading Current toward Target gains. */
    for(ALsizei c{0};c < 2;c++)
        MixSamples({mTempBuffer[c], samplesToDo}, samplesOut, mGains[c].Current, mGains[c].Target,
            samplesToDo, 0);
}
/* Factory producing reference-counted EchoState instances. */
struct EchoStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    { return al::intrusive_ptr<EffectState>{new EchoState{}}; }
};
} // namespace
/* Returns the singleton factory used to create echo effect states. */
EffectStateFactory *EchoStateFactory_getFactory()
{
    static EchoStateFactory EchoFactory{};
    return &EchoFactory;
}

View file

@ -1,355 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Mike Gorchak
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <math.h>
#include <stdlib.h>
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
#include "alu.h"
#include "filters/defs.h"
/* The document "Effects Extension Guide.pdf" says that low and high *
* frequencies are cutoff frequencies. This is not fully correct, they *
* are corner frequencies for low and high shelf filters. If they were *
* just cutoff frequencies, there would be no need in cutoff frequency *
* gains, which are present. Documentation for "Creative Proteus X2" *
* software describes 4-band equalizer functionality in a much better *
* way. This equalizer seems to be a predecessor of OpenAL 4-band *
* equalizer. With low and high shelf filters we are able to cutoff *
* frequencies below and/or above corner frequencies using attenuation *
* gains (below 1.0) and amplify all low and/or high frequencies using *
* gains above 1.0. *
* *
* Low-shelf Low Mid Band High Mid Band High-shelf *
* corner center center corner *
* frequency frequency frequency frequency *
* 50Hz..800Hz 200Hz..3000Hz 1000Hz..8000Hz 4000Hz..16000Hz *
* *
* | | | | *
* | | | | *
* B -----+ /--+--\ /--+--\ +----- *
* O |\ | | | | | | /| *
* O | \ - | - - | - / | *
* S + | \ | | | | | | / | *
* T | | | | | | | | | | *
* ---------+---------------+------------------+---------------+-------- *
* C | | | | | | | | | | *
* U - | / | | | | | | \ | *
* T | / - | - - | - \ | *
* O |/ | | | | | | \| *
* F -----+ \--+--/ \--+--/ +----- *
* F | | | | *
* | | | | *
* *
* Gains vary from 0.126 up to 7.943, which means from -18dB attenuation *
* up to +18dB amplification. Band width varies from 0.01 up to 1.0 in *
* octaves for two mid bands. *
* *
* Implementation is based on the "Cookbook formulae for audio EQ biquad *
* filter coefficients" by Robert Bristow-Johnson *
* http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt */
/* Per-slot state for the 4-band equalizer: four cascaded biquad filters and
 * mixing gains for each effect channel, plus scratch buffers for processing.
 */
typedef struct ALequalizerState {
    DERIVE_FROM_TYPE(ALeffectState);

    struct {
        /* Effect gains for each channel */
        ALfloat CurrentGains[MAX_OUTPUT_CHANNELS];
        ALfloat TargetGains[MAX_OUTPUT_CHANNELS];

        /* Effect parameters */
        /* filter[0]=low shelf, [1]=mid1 peak, [2]=mid2 peak, [3]=high shelf */
        BiquadFilter filter[4];
    } Chans[MAX_EFFECT_CHANNELS];

    ALfloat SampleBuffer[MAX_EFFECT_CHANNELS][BUFFERSIZE];
} ALequalizerState;
/* Method forward declarations and vtable setup for ALequalizerState. */
static ALvoid ALequalizerState_Destruct(ALequalizerState *state);
static ALboolean ALequalizerState_deviceUpdate(ALequalizerState *state, ALCdevice *device);
static ALvoid ALequalizerState_update(ALequalizerState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
static ALvoid ALequalizerState_process(ALequalizerState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALequalizerState)

DEFINE_ALEFFECTSTATE_VTABLE(ALequalizerState);
/* Constructs the base effect state and installs this type's vtable. */
static void ALequalizerState_Construct(ALequalizerState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALequalizerState, ALeffectState, state);
}
/* Destructs the base effect state; this type adds no owned resources. */
static ALvoid ALequalizerState_Destruct(ALequalizerState *state)
{
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
/* Resets the equalizer for a (re)configured device: clears every band
 * filter's history and zeroes the current mixing gains so levels fade in
 * from silence. Always succeeds.
 */
static ALboolean ALequalizerState_deviceUpdate(ALequalizerState *state, ALCdevice *UNUSED(device))
{
    ALsizei chan, idx;

    /* Flush all filter histories. */
    for(chan = 0;chan < MAX_EFFECT_CHANNELS;chan++)
    {
        for(idx = 0;idx < 4;idx++)
            BiquadFilter_clear(&state->Chans[chan].filter[idx]);
    }
    /* Start every output gain at silence. */
    for(chan = 0;chan < MAX_EFFECT_CHANNELS;chan++)
    {
        for(idx = 0;idx < MAX_OUTPUT_CHANNELS;idx++)
            state->Chans[chan].CurrentGains[idx] = 0.0f;
    }
    return AL_TRUE;
}
/* Updates the four band filters from the equalizer properties and computes
 * the first-order output gains for each effect channel.
 */
static ALvoid ALequalizerState_update(ALequalizerState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props)
{
    const ALCdevice *device = context->Device;
    ALfloat frequency = (ALfloat)device->Frequency;
    ALfloat gain, f0norm;
    ALuint i;

    /* Output through the device's first-order ambisonic buffer. */
    STATIC_CAST(ALeffectState,state)->OutBuffer = device->FOAOut.Buffer;
    STATIC_CAST(ALeffectState,state)->OutChannels = device->FOAOut.NumChannels;
    for(i = 0;i < MAX_EFFECT_CHANNELS;i++)
        ComputeFirstOrderGains(&device->FOAOut, IdentityMatrixf.m[i],
                               slot->Params.Gain, state->Chans[i].TargetGains);

    /* Calculate coefficients for the each type of filter. Note that the shelf
     * filters' gain is for the reference frequency, which is the centerpoint
     * of the transition band.
     */
    gain = maxf(sqrtf(props->Equalizer.LowGain), 0.0625f); /* Limit -24dB */
    f0norm = props->Equalizer.LowCutoff/frequency;
    BiquadFilter_setParams(&state->Chans[0].filter[0], BiquadType_LowShelf,
        gain, f0norm, calc_rcpQ_from_slope(gain, 0.75f)
    );

    gain = maxf(props->Equalizer.Mid1Gain, 0.0625f);
    f0norm = props->Equalizer.Mid1Center/frequency;
    BiquadFilter_setParams(&state->Chans[0].filter[1], BiquadType_Peaking,
        gain, f0norm, calc_rcpQ_from_bandwidth(
            f0norm, props->Equalizer.Mid1Width
        )
    );

    gain = maxf(props->Equalizer.Mid2Gain, 0.0625f);
    f0norm = props->Equalizer.Mid2Center/frequency;
    BiquadFilter_setParams(&state->Chans[0].filter[2], BiquadType_Peaking,
        gain, f0norm, calc_rcpQ_from_bandwidth(
            f0norm, props->Equalizer.Mid2Width
        )
    );

    gain = maxf(sqrtf(props->Equalizer.HighGain), 0.0625f);
    f0norm = props->Equalizer.HighCutoff/frequency;
    BiquadFilter_setParams(&state->Chans[0].filter[3], BiquadType_HighShelf,
        gain, f0norm, calc_rcpQ_from_slope(gain, 0.75f)
    );

    /* Copy the filter coefficients for the other input channels. */
    for(i = 1;i < MAX_EFFECT_CHANNELS;i++)
    {
        BiquadFilter_copyParams(&state->Chans[i].filter[0], &state->Chans[0].filter[0]);
        BiquadFilter_copyParams(&state->Chans[i].filter[1], &state->Chans[0].filter[1]);
        BiquadFilter_copyParams(&state->Chans[i].filter[2], &state->Chans[0].filter[2]);
        BiquadFilter_copyParams(&state->Chans[i].filter[3], &state->Chans[0].filter[3]);
    }
}
/* Runs each effect channel through the four band filters in series, then
 * mixes the fully filtered result to the output with fading gains.
 */
static ALvoid ALequalizerState_process(ALequalizerState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    ALfloat (*restrict temps)[BUFFERSIZE] = state->SampleBuffer;
    ALsizei c;

    for(c = 0;c < MAX_EFFECT_CHANNELS;c++)
    {
        /* Low shelf -> mid1 peak -> mid2 peak -> high shelf cascade. */
        BiquadFilter_process(&state->Chans[c].filter[0], temps[0], SamplesIn[c], SamplesToDo);
        BiquadFilter_process(&state->Chans[c].filter[1], temps[1], temps[0], SamplesToDo);
        BiquadFilter_process(&state->Chans[c].filter[2], temps[2], temps[1], SamplesToDo);
        BiquadFilter_process(&state->Chans[c].filter[3], temps[3], temps[2], SamplesToDo);

        MixSamples(temps[3], NumChannels, SamplesOut,
            state->Chans[c].CurrentGains, state->Chans[c].TargetGains,
            SamplesToDo, 0, SamplesToDo
        );
    }
}
/* Factory type for creating equalizer effect state objects. */
typedef struct EqualizerStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} EqualizerStateFactory;
/* Allocates and constructs a new equalizer effect state.
 * Returns NULL on allocation failure.
 */
ALeffectState *EqualizerStateFactory_create(EqualizerStateFactory *UNUSED(factory))
{
    ALequalizerState *state;
    NEW_OBJ0(state, ALequalizerState)();
    if(!state) return NULL;
    return STATIC_CAST(ALeffectState, state);
}
DEFINE_EFFECTSTATEFACTORY_VTABLE(EqualizerStateFactory);

/* Returns the singleton factory used to create equalizer effect states. */
EffectStateFactory *EqualizerStateFactory_getFactory(void)
{
    static EqualizerStateFactory EqualizerFactory = { { GET_VTABLE2(EqualizerStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &EqualizerFactory);
}
/* The equalizer effect has no integer properties; any request is an error. */
void ALequalizer_setParami(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid equalizer integer property 0x%04x", param); }
void ALequalizer_setParamiv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid equalizer integer-vector property 0x%04x", param); }
/* Sets a float equalizer property after validating it against the
 * AL_EQUALIZER_MIN/MAX range. Out-of-range values raise AL_INVALID_VALUE
 * (leaving the property unchanged); unknown properties raise AL_INVALID_ENUM.
 */
void ALequalizer_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_EQUALIZER_LOW_GAIN:
            if(!(val >= AL_EQUALIZER_MIN_LOW_GAIN && val <= AL_EQUALIZER_MAX_LOW_GAIN))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer low-band gain out of range");
            props->Equalizer.LowGain = val;
            break;

        case AL_EQUALIZER_LOW_CUTOFF:
            if(!(val >= AL_EQUALIZER_MIN_LOW_CUTOFF && val <= AL_EQUALIZER_MAX_LOW_CUTOFF))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer low-band cutoff out of range");
            props->Equalizer.LowCutoff = val;
            break;

        case AL_EQUALIZER_MID1_GAIN:
            if(!(val >= AL_EQUALIZER_MIN_MID1_GAIN && val <= AL_EQUALIZER_MAX_MID1_GAIN))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer mid1-band gain out of range");
            props->Equalizer.Mid1Gain = val;
            break;

        case AL_EQUALIZER_MID1_CENTER:
            if(!(val >= AL_EQUALIZER_MIN_MID1_CENTER && val <= AL_EQUALIZER_MAX_MID1_CENTER))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer mid1-band center out of range");
            props->Equalizer.Mid1Center = val;
            break;

        case AL_EQUALIZER_MID1_WIDTH:
            if(!(val >= AL_EQUALIZER_MIN_MID1_WIDTH && val <= AL_EQUALIZER_MAX_MID1_WIDTH))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer mid1-band width out of range");
            props->Equalizer.Mid1Width = val;
            break;

        case AL_EQUALIZER_MID2_GAIN:
            if(!(val >= AL_EQUALIZER_MIN_MID2_GAIN && val <= AL_EQUALIZER_MAX_MID2_GAIN))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer mid2-band gain out of range");
            props->Equalizer.Mid2Gain = val;
            break;

        case AL_EQUALIZER_MID2_CENTER:
            if(!(val >= AL_EQUALIZER_MIN_MID2_CENTER && val <= AL_EQUALIZER_MAX_MID2_CENTER))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer mid2-band center out of range");
            props->Equalizer.Mid2Center = val;
            break;

        case AL_EQUALIZER_MID2_WIDTH:
            if(!(val >= AL_EQUALIZER_MIN_MID2_WIDTH && val <= AL_EQUALIZER_MAX_MID2_WIDTH))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer mid2-band width out of range");
            props->Equalizer.Mid2Width = val;
            break;

        case AL_EQUALIZER_HIGH_GAIN:
            if(!(val >= AL_EQUALIZER_MIN_HIGH_GAIN && val <= AL_EQUALIZER_MAX_HIGH_GAIN))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer high-band gain out of range");
            props->Equalizer.HighGain = val;
            break;

        case AL_EQUALIZER_HIGH_CUTOFF:
            if(!(val >= AL_EQUALIZER_MIN_HIGH_CUTOFF && val <= AL_EQUALIZER_MAX_HIGH_CUTOFF))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Equalizer high-band cutoff out of range");
            props->Equalizer.HighCutoff = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid equalizer float property 0x%04x", param);
    }
}
/* All equalizer float properties are single-valued, so the vector setter
 * forwards the first element to the scalar setter.
 */
void ALequalizer_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALequalizer_setParamf(effect, context, param, vals[0]); }

/* The equalizer effect has no integer properties; any request is an error. */
void ALequalizer_getParami(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(val))
{ alSetError(context, AL_INVALID_ENUM, "Invalid equalizer integer property 0x%04x", param); }
void ALequalizer_getParamiv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint *UNUSED(vals))
{ alSetError(context, AL_INVALID_ENUM, "Invalid equalizer integer-vector property 0x%04x", param); }
/* Retrieves the current value of a float equalizer property into *val.
 * Unknown properties raise AL_INVALID_ENUM and leave *val untouched.
 */
void ALequalizer_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_EQUALIZER_LOW_GAIN:
            *val = props->Equalizer.LowGain;
            break;

        case AL_EQUALIZER_LOW_CUTOFF:
            *val = props->Equalizer.LowCutoff;
            break;

        case AL_EQUALIZER_MID1_GAIN:
            *val = props->Equalizer.Mid1Gain;
            break;

        case AL_EQUALIZER_MID1_CENTER:
            *val = props->Equalizer.Mid1Center;
            break;

        case AL_EQUALIZER_MID1_WIDTH:
            *val = props->Equalizer.Mid1Width;
            break;

        case AL_EQUALIZER_MID2_GAIN:
            *val = props->Equalizer.Mid2Gain;
            break;

        case AL_EQUALIZER_MID2_CENTER:
            *val = props->Equalizer.Mid2Center;
            break;

        case AL_EQUALIZER_MID2_WIDTH:
            *val = props->Equalizer.Mid2Width;
            break;

        case AL_EQUALIZER_HIGH_GAIN:
            *val = props->Equalizer.HighGain;
            break;

        case AL_EQUALIZER_HIGH_CUTOFF:
            *val = props->Equalizer.HighCutoff;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid equalizer float property 0x%04x", param);
    }
}
/* Float-vector properties are single-valued; forward to the scalar getter. */
void ALequalizer_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALequalizer_getParamf(effect, context, param, vals); }

DEFINE_ALEFFECT_VTABLE(ALequalizer);

View file

@ -0,0 +1,184 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2013 by Mike Gorchak
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cmath>
#include <cstdlib>
#include <algorithm>
#include <functional>
#include "alcmain.h"
#include "alcontext.h"
#include "core/filters/biquad.h"
#include "effectslot.h"
#include "vecmat.h"
namespace {
/* The document "Effects Extension Guide.pdf" says that low and high *
* frequencies are cutoff frequencies. This is not fully correct, they *
* are corner frequencies for low and high shelf filters. If they were *
* just cutoff frequencies, there would be no need in cutoff frequency *
* gains, which are present. Documentation for "Creative Proteus X2" *
* software describes 4-band equalizer functionality in a much better *
* way. This equalizer seems to be a predecessor of OpenAL 4-band *
* equalizer. With low and high shelf filters we are able to cutoff *
* frequencies below and/or above corner frequencies using attenuation *
* gains (below 1.0) and amplify all low and/or high frequencies using *
* gains above 1.0. *
* *
* Low-shelf Low Mid Band High Mid Band High-shelf *
* corner center center corner *
* frequency frequency frequency frequency *
* 50Hz..800Hz 200Hz..3000Hz 1000Hz..8000Hz 4000Hz..16000Hz *
* *
* | | | | *
* | | | | *
* B -----+ /--+--\ /--+--\ +----- *
* O |\ | | | | | | /| *
* O | \ - | - - | - / | *
* S + | \ | | | | | | / | *
* T | | | | | | | | | | *
* ---------+---------------+------------------+---------------+-------- *
* C | | | | | | | | | | *
* U - | / | | | | | | \ | *
* T | / - | - - | - \ | *
* O |/ | | | | | | \| *
* F -----+ \--+--/ \--+--/ +----- *
* F | | | | *
* | | | | *
* *
* Gains vary from 0.126 up to 7.943, which means from -18dB attenuation *
* up to +18dB amplification. Band width varies from 0.01 up to 1.0 in *
* octaves for two mid bands. *
* *
* Implementation is based on the "Cookbook formulae for audio EQ biquad *
* filter coefficients" by Robert Bristow-Johnson *
* http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt */
/* Per-slot state for the 4-band equalizer: four cascaded biquad filters and
 * mixing gains per ambisonic channel, plus a scratch line for processing.
 */
struct EqualizerState final : public EffectState {
    struct {
        /* Effect parameters */
        /* filter[0]=low shelf, [1]=mid1 peak, [2]=mid2 peak, [3]=high shelf */
        BiquadFilter filter[4];

        /* Effect gains for each channel */
        float CurrentGains[MAX_OUTPUT_CHANNELS]{};
        float TargetGains[MAX_OUTPUT_CHANNELS]{};
    } mChans[MaxAmbiChannels];

    /* Scratch buffer holding one channel's filtered samples. */
    FloatBufferLine mSampleBuffer{};


    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(EqualizerState)
};
void EqualizerState::deviceUpdate(const ALCdevice*, const Buffer&)
{
for(auto &e : mChans)
{
std::for_each(std::begin(e.filter), std::end(e.filter), std::mem_fn(&BiquadFilter::clear));
std::fill(std::begin(e.CurrentGains), std::end(e.CurrentGains), 0.0f);
}
}
/* Updates the four band filters from the equalizer properties, copies the
 * coefficients to every input channel, and computes the output pan gains.
 */
void EqualizerState::update(const ALCcontext *context, const EffectSlot *slot,
    const EffectProps *props, const EffectTarget target)
{
    const ALCdevice *device{context->mDevice.get()};
    auto frequency = static_cast<float>(device->Frequency);
    float gain, f0norm;

    /* Calculate coefficients for the each type of filter. Note that the shelf
     * and peaking filters' gain is for the centerpoint of the transition band,
     * while the effect property gains are for the shelf/peak itself. So the
     * property gains need their dB halved (sqrt of linear gain) for the
     * shelf/peak to reach the provided gain.
     */
    gain = std::sqrt(props->Equalizer.LowGain);
    f0norm = props->Equalizer.LowCutoff / frequency;
    mChans[0].filter[0].setParamsFromSlope(BiquadType::LowShelf, f0norm, gain, 0.75f);

    gain = std::sqrt(props->Equalizer.Mid1Gain);
    f0norm = props->Equalizer.Mid1Center / frequency;
    mChans[0].filter[1].setParamsFromBandwidth(BiquadType::Peaking, f0norm, gain,
        props->Equalizer.Mid1Width);

    gain = std::sqrt(props->Equalizer.Mid2Gain);
    f0norm = props->Equalizer.Mid2Center / frequency;
    mChans[0].filter[2].setParamsFromBandwidth(BiquadType::Peaking, f0norm, gain,
        props->Equalizer.Mid2Width);

    gain = std::sqrt(props->Equalizer.HighGain);
    f0norm = props->Equalizer.HighCutoff / frequency;
    mChans[0].filter[3].setParamsFromSlope(BiquadType::HighShelf, f0norm, gain, 0.75f);

    /* Copy the filter coefficients for the other input channels. */
    for(size_t i{1u};i < slot->Wet.Buffer.size();++i)
    {
        mChans[i].filter[0].copyParamsFrom(mChans[0].filter[0]);
        mChans[i].filter[1].copyParamsFrom(mChans[0].filter[1]);
        mChans[i].filter[2].copyParamsFrom(mChans[0].filter[2]);
        mChans[i].filter[3].copyParamsFrom(mChans[0].filter[3]);
    }

    /* Route each input channel straight through to the matching output. */
    mOutTarget = target.Main->Buffer;
    auto set_gains = [slot,target](auto &chan, al::span<const float,MaxAmbiChannels> coeffs)
    { ComputePanGains(target.Main, coeffs.data(), slot->Gain, chan.TargetGains); };
    SetAmbiPanIdentity(std::begin(mChans), slot->Wet.Buffer.size(), set_gains);
}
/* Runs each input channel through the four band filters in two dual-biquad
 * passes, then mixes the filtered samples to the output with fading gains.
 */
void EqualizerState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    const al::span<float> buffer{mSampleBuffer.data(), samplesToDo};
    auto chan = std::addressof(mChans[0]);
    for(const auto &input : samplesIn)
    {
        const al::span<const float> inbuf{input.data(), samplesToDo};
        /* Low shelf + mid1 peak, then mid2 peak + high shelf (in place). */
        DualBiquad{chan->filter[0], chan->filter[1]}.process(inbuf, buffer.begin());
        DualBiquad{chan->filter[2], chan->filter[3]}.process(buffer, buffer.begin());

        MixSamples(buffer, samplesOut, chan->CurrentGains, chan->TargetGains, samplesToDo, 0u);
        ++chan;
    }
}
/* Factory producing reference-counted EqualizerState instances. */
struct EqualizerStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    { return al::intrusive_ptr<EffectState>{new EqualizerState{}}; }
};
} // namespace
/* Returns the singleton factory used to create equalizer effect states. */
EffectStateFactory *EqualizerStateFactory_getFactory()
{
    static EqualizerStateFactory EqualizerFactory{};
    return &EqualizerFactory;
}

View file

@ -0,0 +1,232 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2018 by Raul Herraiz.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cmath>
#include <cstdlib>
#include <array>
#include <complex>
#include <algorithm>
#include "alcmain.h"
#include "alcomplex.h"
#include "alcontext.h"
#include "alu.h"
#include "effectslot.h"
#include "math_defs.h"
namespace {
using complex_d = std::complex<double>;

/* Hilbert transform block size and oversampling parameters. HIL_STEP is the
 * hop between successive transform windows, and FIFO_LATENCY the number of
 * input samples buffered before the first full window is available.
 */
#define HIL_SIZE 1024
#define OVERSAMP (1<<2)

#define HIL_STEP     (HIL_SIZE / OVERSAMP)
#define FIFO_LATENCY (HIL_STEP * (OVERSAMP-1))
/* Define a Hann window, used to filter the HIL input and output. */
std::array<double,HIL_SIZE> InitHannWindow()
{
    std::array<double,HIL_SIZE> ret;
    /* Create lookup table of the Hann window for the desired size, i.e. HIL_SIZE */
    for(size_t i{0};i < HIL_SIZE>>1;i++)
    {
        constexpr double scale{al::MathDefs<double>::Pi() / double{HIL_SIZE}};
        const double val{std::sin(static_cast<double>(i+1) * scale)};
        /* The window is symmetric; one sine value fills both halves. */
        ret[i] = ret[HIL_SIZE-1-i] = val * val;
    }
    return ret;
}
/* Precomputed window shared by all frequency-shifter instances. */
alignas(16) const std::array<double,HIL_SIZE> HannWindow = InitHannWindow();
/* Per-slot state for the frequency-shifter effect, implemented with a
 * windowed Hilbert transform and per-channel phasor modulation.
 */
struct FshifterState final : public EffectState {
    /* Effect parameters */
    size_t mCount{};          /* samples accumulated in the input FIFO */
    uint mPhaseStep[2]{};     /* fixed-point phasor increment per channel */
    uint mPhase[2]{};         /* current fixed-point phasor position */
    double mSign[2]{};        /* +1.0 shifts up, -1.0 shifts down */

    /* Effects buffers */
    double mInFIFO[HIL_SIZE]{};
    complex_d mOutFIFO[HIL_STEP]{};
    complex_d mOutputAccum[HIL_SIZE]{};
    complex_d mAnalytic[HIL_SIZE]{};
    complex_d mOutdata[BufferLineSize]{};

    alignas(16) float mBufferOut[BufferLineSize]{};

    /* Effect gains for each output channel */
    struct {
        float Current[MAX_OUTPUT_CHANNELS]{};
        float Target[MAX_OUTPUT_CHANNELS]{};
    } mGains[2];


    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(FshifterState)
};
void FshifterState::deviceUpdate(const ALCdevice*, const Buffer&)
{
/* (Re-)initializing parameters and clear the buffers. */
mCount = FIFO_LATENCY;
std::fill(std::begin(mPhaseStep), std::end(mPhaseStep), 0u);
std::fill(std::begin(mPhase), std::end(mPhase), 0u);
std::fill(std::begin(mSign), std::end(mSign), 1.0);
std::fill(std::begin(mInFIFO), std::end(mInFIFO), 0.0);
std::fill(std::begin(mOutFIFO), std::end(mOutFIFO), complex_d{});
std::fill(std::begin(mOutputAccum), std::end(mOutputAccum), complex_d{});
std::fill(std::begin(mAnalytic), std::end(mAnalytic), complex_d{});
for(auto &gain : mGains)
{
std::fill(std::begin(gain.Current), std::end(gain.Current), 0.0f);
std::fill(std::begin(gain.Target), std::end(gain.Target), 0.0f);
}
}
void FshifterState::update(const ALCcontext *context, const EffectSlot *slot,
const EffectProps *props, const EffectTarget target)
{
const ALCdevice *device{context->mDevice.get()};
const float step{props->Fshifter.Frequency / static_cast<float>(device->Frequency)};
mPhaseStep[0] = mPhaseStep[1] = fastf2u(minf(step, 1.0f) * MixerFracOne);
switch(props->Fshifter.LeftDirection)
{
case FShifterDirection::Down:
mSign[0] = -1.0;
break;
case FShifterDirection::Up:
mSign[0] = 1.0;
break;
case FShifterDirection::Off:
mPhase[0] = 0;
mPhaseStep[0] = 0;
break;
}
switch(props->Fshifter.RightDirection)
{
case FShifterDirection::Down:
mSign[1] = -1.0;
break;
case FShifterDirection::Up:
mSign[1] = 1.0;
break;
case FShifterDirection::Off:
mPhase[1] = 0;
mPhaseStep[1] = 0;
break;
}
const auto lcoeffs = CalcDirectionCoeffs({-1.0f, 0.0f, 0.0f}, 0.0f);
const auto rcoeffs = CalcDirectionCoeffs({ 1.0f, 0.0f, 0.0f}, 0.0f);
mOutTarget = target.Main->Buffer;
ComputePanGains(target.Main, lcoeffs.data(), slot->Gain, mGains[0].Target);
ComputePanGains(target.Main, rcoeffs.data(), slot->Gain, mGains[1].Target);
}
/* Generates the frequency-shifted output: buffers input through an
 * overlapped Hilbert transform to get an analytic signal, then multiplies it
 * by a complex oscillator per output channel.
 */
void FshifterState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    for(size_t base{0u};base < samplesToDo;)
    {
        size_t todo{minz(HIL_SIZE-mCount, samplesToDo-base)};

        /* Fill FIFO buffer with samples data */
        size_t count{mCount};
        do {
            mInFIFO[count] = samplesIn[0][base];
            /* Output is delayed by FIFO_LATENCY samples relative to input. */
            mOutdata[base] = mOutFIFO[count-FIFO_LATENCY];
            ++base; ++count;
        } while(--todo);
        mCount = count;

        /* Check whether FIFO buffer is filled. If not, there aren't enough
         * samples yet to process another window. */
        if(mCount < HIL_SIZE) break;
        mCount = FIFO_LATENCY;

        /* Real signal windowing and store in Analytic buffer */
        for(size_t k{0};k < HIL_SIZE;k++)
            mAnalytic[k] = mInFIFO[k]*HannWindow[k];

        /* Processing signal by Discrete Hilbert Transform (analytical signal). */
        complex_hilbert(mAnalytic);

        /* Windowing and add to output accumulator. The 2/OVERSAMP factor
         * compensates for the overlapped Hann windows. */
        for(size_t k{0};k < HIL_SIZE;k++)
            mOutputAccum[k] += 2.0/OVERSAMP*HannWindow[k]*mAnalytic[k];

        /* Shift accumulator, input & output FIFO */
        std::copy_n(mOutputAccum, HIL_STEP, mOutFIFO);
        auto accum_iter = std::copy(std::begin(mOutputAccum)+HIL_STEP, std::end(mOutputAccum),
            std::begin(mOutputAccum));
        std::fill(accum_iter, std::end(mOutputAccum), complex_d{});
        std::copy(std::begin(mInFIFO)+HIL_STEP, std::end(mInFIFO), std::begin(mInFIFO));
    }

    /* Process frequency shifter using the analytic signal obtained. */
    float *RESTRICT BufferOut{mBufferOut};
    for(int c{0};c < 2;++c)
    {
        const uint phase_step{mPhaseStep[c]};
        uint phase_idx{mPhase[c]};
        for(size_t k{0};k < samplesToDo;++k)
        {
            /* Multiply the analytic signal by a complex oscillator and take
             * the real part; mSign selects up- vs down-shift. */
            const double phase{phase_idx * ((1.0/MixerFracOne) * al::MathDefs<double>::Tau())};
            BufferOut[k] = static_cast<float>(mOutdata[k].real()*std::cos(phase) +
                mOutdata[k].imag()*std::sin(phase)*mSign[c]);

            phase_idx += phase_step;
            phase_idx &= MixerFracMask;
        }
        mPhase[c] = phase_idx;

        /* Now, mix the processed sound data to the output. The last argument
         * pair presumably sets the gain-fade length/offset — confirm against
         * MixSamples' signature. */
        MixSamples({BufferOut, samplesToDo}, samplesOut, mGains[c].Current, mGains[c].Target,
            maxz(samplesToDo, 512), 0);
    }
}
struct FshifterStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override;
};

/* Creates EffectState objects of the appropriate type. */
al::intrusive_ptr<EffectState> FshifterStateFactory::create()
{ return al::intrusive_ptr<EffectState>{new FshifterState{}}; }
} // namespace

/* Returns the shared, stateless factory for frequency-shifter states. */
EffectStateFactory *FshifterStateFactory_getFactory()
{
    static FshifterStateFactory factory{};
    return &factory;
}

View file

@ -1,303 +0,0 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2009 by Chris Robinson.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <math.h>
#include <stdlib.h>
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
#include "alu.h"
#include "filters/defs.h"
/* Number of samples processed per internal update pass. */
#define MAX_UPDATE_SAMPLES 128

typedef struct ALmodulatorState {
    DERIVE_FROM_TYPE(ALeffectState);

    /* Fills a buffer with modulation samples for the selected waveform;
     * assigned in the update method. */
    void (*GetSamples)(ALfloat*, ALsizei, const ALsizei, ALsizei);

    ALsizei index; /* Current waveform phase, in fixed-point. */
    ALsizei step;  /* Phase increment per sample, in fixed-point. */

    alignas(16) ALfloat ModSamples[MAX_UPDATE_SAMPLES];

    struct {
        BiquadFilter Filter; /* High-pass filter applied before modulating. */

        ALfloat CurrentGains[MAX_OUTPUT_CHANNELS];
        ALfloat TargetGains[MAX_OUTPUT_CHANNELS];
    } Chans[MAX_EFFECT_CHANNELS];
} ALmodulatorState;
/* Forward-declare the "virtual" methods used to build the effect vtable. */
static ALvoid ALmodulatorState_Destruct(ALmodulatorState *state);
static ALboolean ALmodulatorState_deviceUpdate(ALmodulatorState *state, ALCdevice *device);
static ALvoid ALmodulatorState_update(ALmodulatorState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
static ALvoid ALmodulatorState_process(ALmodulatorState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels);
DECLARE_DEFAULT_ALLOCATORS(ALmodulatorState)

DEFINE_ALEFFECTSTATE_VTABLE(ALmodulatorState);

/* Fixed-point waveform phase format: 24 fractional bits. */
#define WAVEFORM_FRACBITS 24
#define WAVEFORM_FRACONE (1<<WAVEFORM_FRACBITS)
#define WAVEFORM_FRACMASK (WAVEFORM_FRACONE-1)
/* Sine wave over one fixed-point period, scaled into the 0..+1 range. */
static inline ALfloat Sin(ALsizei index)
{
    const ALfloat phase = index*(F_TAU/WAVEFORM_FRACONE) - F_PI;
    return sinf(phase)*0.5f + 0.5f;
}

/* Sawtooth wave rising from 0 to +1 over one period. WAVEFORM_FRACONE is a
 * power of two, so multiplying by its reciprocal is exact. */
static inline ALfloat Saw(ALsizei index)
{
    return (ALfloat)index * (1.0f/WAVEFORM_FRACONE);
}

/* Square wave: 0 for the first half of the period, +1 for the second. */
static inline ALfloat Square(ALsizei index)
{
    return (index&(WAVEFORM_FRACONE>>1)) ? 1.0f : 0.0f;
}
/* Generates one Modulate<func> function per waveform: fills dst with todo
 * samples of the waveform, advancing (and wrapping) the fixed-point phase
 * index by step for each sample.
 */
#define DECL_TEMPLATE(func)                                                   \
static void Modulate##func(ALfloat *restrict dst, ALsizei index,              \
                           const ALsizei step, ALsizei todo)                  \
{                                                                             \
    ALsizei i;                                                                \
    for(i = 0;i < todo;i++)                                                   \
    {                                                                         \
        index += step;                                                        \
        index &= WAVEFORM_FRACMASK;                                           \
        dst[i] = func(index);                                                 \
    }                                                                         \
}

DECL_TEMPLATE(Sin)
DECL_TEMPLATE(Saw)
DECL_TEMPLATE(Square)

#undef DECL_TEMPLATE
/* Constructs the effect state: initialize the parent type and set the
 * vtable, then reset the waveform phase and step to defaults.
 */
static void ALmodulatorState_Construct(ALmodulatorState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALmodulatorState, ALeffectState, state);

    state->index = 0;
    state->step = 1;
}

/* Destructs the effect state; forwards to the parent destructor. */
static ALvoid ALmodulatorState_Destruct(ALmodulatorState *state)
{
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}
/* Device-dependent update: reset the per-channel filters and fade the output
 * gains in from silence. Always reports success.
 */
static ALboolean ALmodulatorState_deviceUpdate(ALmodulatorState *state, ALCdevice *UNUSED(device))
{
    ALsizei i, j;
    for(i = 0;i < MAX_EFFECT_CHANNELS;i++)
    {
        BiquadFilter_clear(&state->Chans[i].Filter);
        for(j = 0;j < MAX_OUTPUT_CHANNELS;j++)
            state->Chans[i].CurrentGains[j] = 0.0f;
    }
    return AL_TRUE;
}
/* Applies the slot's ring-modulator properties: waveform generator,
 * fixed-point phase step, and the high-pass filter cutoff.
 */
static ALvoid ALmodulatorState_update(ALmodulatorState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props)
{
    const ALCdevice *device = context->Device;
    ALfloat cw, a;
    ALsizei i;

    /* Select the generator for the requested waveform shape. */
    if(props->Modulator.Waveform == AL_RING_MODULATOR_SINUSOID)
        state->GetSamples = ModulateSin;
    else if(props->Modulator.Waveform == AL_RING_MODULATOR_SAWTOOTH)
        state->GetSamples = ModulateSaw;
    else /*if(Slot->Params.EffectProps.Modulator.Waveform == AL_RING_MODULATOR_SQUARE)*/
        state->GetSamples = ModulateSquare;

    /* Convert the modulation frequency to a rounded fixed-point phase step,
     * clamped to at least 1 so the waveform always advances. */
    state->step = float2int(props->Modulator.Frequency*WAVEFORM_FRACONE/device->Frequency + 0.5f);
    state->step = clampi(state->step, 1, WAVEFORM_FRACONE-1);

    /* Custom filter coeffs, which match the old version instead of a low-shelf. */
    cw = cosf(F_TAU * props->Modulator.HighPassCutoff / device->Frequency);
    a = (2.0f-cw) - sqrtf(powf(2.0f-cw, 2.0f) - 1.0f);

    state->Chans[0].Filter.b0 = a;
    state->Chans[0].Filter.b1 = -a;
    state->Chans[0].Filter.b2 = 0.0f;
    state->Chans[0].Filter.a1 = -a;
    state->Chans[0].Filter.a2 = 0.0f;
    /* All effect channels share the same filter parameters. */
    for(i = 1;i < MAX_EFFECT_CHANNELS;i++)
        BiquadFilter_copyParams(&state->Chans[i].Filter, &state->Chans[0].Filter);

    /* Output through the device's first-order ambisonic buffer. */
    STATIC_CAST(ALeffectState,state)->OutBuffer = device->FOAOut.Buffer;
    STATIC_CAST(ALeffectState,state)->OutChannels = device->FOAOut.NumChannels;
    for(i = 0;i < MAX_EFFECT_CHANNELS;i++)
        ComputeFirstOrderGains(&device->FOAOut, IdentityMatrixf.m[i],
                               slot->Params.Gain, state->Chans[i].TargetGains);
}
/* Processes SamplesToDo input samples in chunks: high-pass each effect
 * channel, multiply by the modulation waveform, and mix to the output.
 */
static ALvoid ALmodulatorState_process(ALmodulatorState *state, ALsizei SamplesToDo, const ALfloat (*restrict SamplesIn)[BUFFERSIZE], ALfloat (*restrict SamplesOut)[BUFFERSIZE], ALsizei NumChannels)
{
    ALfloat *restrict modsamples = ASSUME_ALIGNED(state->ModSamples, 16);
    const ALsizei step = state->step;
    ALsizei base;

    for(base = 0;base < SamplesToDo;)
    {
        alignas(16) ALfloat temps[2][MAX_UPDATE_SAMPLES];
        ALsizei td = mini(MAX_UPDATE_SAMPLES, SamplesToDo-base);
        ALsizei c, i;

        /* Generate the modulation waveform for this chunk and advance the
         * fixed-point phase. */
        state->GetSamples(modsamples, state->index, step, td);
        state->index += (step*td) & WAVEFORM_FRACMASK;
        state->index &= WAVEFORM_FRACMASK;

        for(c = 0;c < MAX_EFFECT_CHANNELS;c++)
        {
            /* Filter the input, multiply by the modulator, and mix. */
            BiquadFilter_process(&state->Chans[c].Filter, temps[0], &SamplesIn[c][base], td);
            for(i = 0;i < td;i++)
                temps[1][i] = temps[0][i] * modsamples[i];

            MixSamples(temps[1], NumChannels, SamplesOut, state->Chans[c].CurrentGains,
                       state->Chans[c].TargetGains, SamplesToDo-base, base, td);
        }

        base += td;
    }
}
typedef struct ModulatorStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} ModulatorStateFactory;

/* Allocates and constructs a new modulator effect state. */
static ALeffectState *ModulatorStateFactory_create(ModulatorStateFactory *UNUSED(factory))
{
    ALmodulatorState *state;

    NEW_OBJ0(state, ALmodulatorState)();
    if(!state) return NULL;

    return STATIC_CAST(ALeffectState, state);
}

DEFINE_EFFECTSTATEFACTORY_VTABLE(ModulatorStateFactory);

/* Returns the singleton factory used to create modulator effect states. */
EffectStateFactory *ModulatorStateFactory_getFactory(void)
{
    static ModulatorStateFactory ModulatorFactory = { { GET_VTABLE2(ModulatorStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &ModulatorFactory);
}
/* Sets a float property on the modulator effect, validating its range. */
void ALmodulator_setParamf(ALeffect *effect, ALCcontext *context, ALenum param, ALfloat val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_RING_MODULATOR_FREQUENCY:
            if(!(val >= AL_RING_MODULATOR_MIN_FREQUENCY && val <= AL_RING_MODULATOR_MAX_FREQUENCY))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Modulator frequency out of range");
            props->Modulator.Frequency = val;
            break;

        case AL_RING_MODULATOR_HIGHPASS_CUTOFF:
            if(!(val >= AL_RING_MODULATOR_MIN_HIGHPASS_CUTOFF && val <= AL_RING_MODULATOR_MAX_HIGHPASS_CUTOFF))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Modulator high-pass cutoff out of range");
            props->Modulator.HighPassCutoff = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid modulator float property 0x%04x", param);
    }
}
/* Float-vector setter; all of the modulator's float properties are scalar. */
void ALmodulator_setParamfv(ALeffect *effect, ALCcontext *context, ALenum param, const ALfloat *vals)
{ ALmodulator_setParamf(effect, context, param, vals[0]); }

/* Sets an integer property. Frequency/cutoff forward to the float setter;
 * the waveform selection is validated here. */
void ALmodulator_setParami(ALeffect *effect, ALCcontext *context, ALenum param, ALint val)
{
    ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_RING_MODULATOR_FREQUENCY:
        case AL_RING_MODULATOR_HIGHPASS_CUTOFF:
            ALmodulator_setParamf(effect, context, param, (ALfloat)val);
            break;

        case AL_RING_MODULATOR_WAVEFORM:
            if(!(val >= AL_RING_MODULATOR_MIN_WAVEFORM && val <= AL_RING_MODULATOR_MAX_WAVEFORM))
                SETERR_RETURN(context, AL_INVALID_VALUE,, "Invalid modulator waveform");
            props->Modulator.Waveform = val;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid modulator integer property 0x%04x", param);
    }
}
/* Integer-vector setter; all integer properties are scalar. */
void ALmodulator_setParamiv(ALeffect *effect, ALCcontext *context, ALenum param, const ALint *vals)
{ ALmodulator_setParami(effect, context, param, vals[0]); }

/* Reads an integer property; float-valued properties are truncated. */
void ALmodulator_getParami(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_RING_MODULATOR_FREQUENCY:
            *val = (ALint)props->Modulator.Frequency;
            break;
        case AL_RING_MODULATOR_HIGHPASS_CUTOFF:
            *val = (ALint)props->Modulator.HighPassCutoff;
            break;
        case AL_RING_MODULATOR_WAVEFORM:
            *val = props->Modulator.Waveform;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid modulator integer property 0x%04x", param);
    }
}
/* Integer-vector getter; all integer properties are scalar. */
void ALmodulator_getParamiv(const ALeffect *effect, ALCcontext *context, ALenum param, ALint *vals)
{ ALmodulator_getParami(effect, context, param, vals); }

/* Reads a float property. */
void ALmodulator_getParamf(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *val)
{
    const ALeffectProps *props = &effect->Props;
    switch(param)
    {
        case AL_RING_MODULATOR_FREQUENCY:
            *val = props->Modulator.Frequency;
            break;
        case AL_RING_MODULATOR_HIGHPASS_CUTOFF:
            *val = props->Modulator.HighPassCutoff;
            break;

        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid modulator float property 0x%04x", param);
    }
}
/* Float-vector getter; all float properties are scalar. */
void ALmodulator_getParamfv(const ALeffect *effect, ALCcontext *context, ALenum param, ALfloat *vals)
{ ALmodulator_getParamf(effect, context, param, vals); }

DEFINE_ALEFFECT_VTABLE(ALmodulator);

View file

@ -0,0 +1,173 @@
/**
* OpenAL cross platform audio library
* Copyright (C) 2009 by Chris Robinson.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
* Or go to http://www.gnu.org/copyleft/lgpl.html
*/
#include "config.h"
#include <cmath>
#include <cstdlib>
#include <cmath>
#include <algorithm>
#include "alcmain.h"
#include "alcontext.h"
#include "core/filters/biquad.h"
#include "effectslot.h"
#include "vecmat.h"
namespace {

/* Number of samples processed per internal update pass. */
#define MAX_UPDATE_SAMPLES 128

/* Fixed-point waveform phase format: 24 fractional bits. */
#define WAVEFORM_FRACBITS 24
#define WAVEFORM_FRACONE (1<<WAVEFORM_FRACBITS)
#define WAVEFORM_FRACMASK (WAVEFORM_FRACONE-1)
inline float Sin(uint index)
{
constexpr float scale{al::MathDefs<float>::Tau() / WAVEFORM_FRACONE};
return std::sin(static_cast<float>(index) * scale);
}
inline float Saw(uint index)
{ return static_cast<float>(index)*(2.0f/WAVEFORM_FRACONE) - 1.0f; }
inline float Square(uint index)
{ return static_cast<float>(static_cast<int>((index>>(WAVEFORM_FRACBITS-2))&2) - 1); }
inline float One(uint) { return 1.0f; }
/* Fills dst with todo waveform samples, advancing (and wrapping) the
 * fixed-point phase index by step for each one.
 */
template<float (&func)(uint)>
void Modulate(float *RESTRICT dst, uint index, const uint step, size_t todo)
{
    for(size_t i{0u};i < todo;i++)
    {
        index = (index+step) & WAVEFORM_FRACMASK;
        dst[i] = func(index);
    }
}
struct ModulatorState final : public EffectState {
    /* Waveform generator that fills a buffer with modulation samples;
     * selected in update(). */
    void (*mGetSamples)(float*RESTRICT, uint, const uint, size_t){};

    uint mIndex{0}; /* Current waveform phase (fixed-point). */
    uint mStep{1};  /* Phase increment per sample (fixed-point). */

    struct {
        BiquadFilter Filter; /* High-pass filter applied before modulating. */

        float CurrentGains[MAX_OUTPUT_CHANNELS]{};
        float TargetGains[MAX_OUTPUT_CHANNELS]{};
    } mChans[MaxAmbiChannels];


    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(ModulatorState)
};
void ModulatorState::deviceUpdate(const ALCdevice*, const Buffer&)
{
for(auto &e : mChans)
{
e.Filter.clear();
std::fill(std::begin(e.CurrentGains), std::end(e.CurrentGains), 0.0f);
}
}
void ModulatorState::update(const ALCcontext *context, const EffectSlot *slot,
const EffectProps *props, const EffectTarget target)
{
const ALCdevice *device{context->mDevice.get()};
const float step{props->Modulator.Frequency / static_cast<float>(device->Frequency)};
mStep = fastf2u(clampf(step*WAVEFORM_FRACONE, 0.0f, float{WAVEFORM_FRACONE-1}));
if(mStep == 0)
mGetSamples = Modulate<One>;
else if(props->Modulator.Waveform == ModulatorWaveform::Sinusoid)
mGetSamples = Modulate<Sin>;
else if(props->Modulator.Waveform == ModulatorWaveform::Sawtooth)
mGetSamples = Modulate<Saw>;
else /*if(props->Modulator.Waveform == ModulatorWaveform::Square)*/
mGetSamples = Modulate<Square>;
float f0norm{props->Modulator.HighPassCutoff / static_cast<float>(device->Frequency)};
f0norm = clampf(f0norm, 1.0f/512.0f, 0.49f);
/* Bandwidth value is constant in octaves. */
mChans[0].Filter.setParamsFromBandwidth(BiquadType::HighPass, f0norm, 1.0f, 0.75f);
for(size_t i{1u};i < slot->Wet.Buffer.size();++i)
mChans[i].Filter.copyParamsFrom(mChans[0].Filter);
mOutTarget = target.Main->Buffer;
auto set_gains = [slot,target](auto &chan, al::span<const float,MaxAmbiChannels> coeffs)
{ ComputePanGains(target.Main, coeffs.data(), slot->Gain, chan.TargetGains); };
SetAmbiPanIdentity(std::begin(mChans), slot->Wet.Buffer.size(), set_gains);
}
/* Processes samplesToDo input samples in chunks: high-pass each channel,
 * multiply by the modulation waveform, and mix to the output.
 */
void ModulatorState::process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn, const al::span<FloatBufferLine> samplesOut)
{
    for(size_t base{0u};base < samplesToDo;)
    {
        alignas(16) float modsamples[MAX_UPDATE_SAMPLES];
        const size_t td{minz(MAX_UPDATE_SAMPLES, samplesToDo-base)};

        /* Generate the modulation waveform for this chunk and advance the
         * fixed-point phase. */
        mGetSamples(modsamples, mIndex, mStep, td);
        mIndex += static_cast<uint>(mStep * td);
        mIndex &= WAVEFORM_FRACMASK;

        auto chandata = std::begin(mChans);
        for(const auto &input : samplesIn)
        {
            /* Filter the input, multiply by the modulator, and mix. */
            alignas(16) float temps[MAX_UPDATE_SAMPLES];
            chandata->Filter.process({&input[base], td}, temps);
            for(size_t i{0u};i < td;i++)
                temps[i] *= modsamples[i];

            MixSamples({temps, td}, samplesOut, chandata->CurrentGains, chandata->TargetGains,
                samplesToDo-base, base);
            ++chandata;
        }

        base += td;
    }
}
struct ModulatorStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override;
};

/* Creates EffectState objects of the appropriate type. */
al::intrusive_ptr<EffectState> ModulatorStateFactory::create()
{ return al::intrusive_ptr<EffectState>{new ModulatorState{}}; }

} // namespace

/* Returns the shared, stateless factory for modulator states. */
EffectStateFactory *ModulatorStateFactory_getFactory()
{
    static ModulatorStateFactory factory{};
    return &factory;
}

View file

@ -1,179 +0,0 @@
#include "config.h"
#include <stdlib.h>
#include "AL/al.h"
#include "AL/alc.h"
#include "alMain.h"
#include "alAuxEffectSlot.h"
#include "alError.h"
/* The null effect carries no state beyond the base effect type. */
typedef struct ALnullState {
    DERIVE_FROM_TYPE(ALeffectState);
} ALnullState;

/* Forward-declare "virtual" functions to define the vtable with. */
static ALvoid ALnullState_Destruct(ALnullState *state);
static ALboolean ALnullState_deviceUpdate(ALnullState *state, ALCdevice *device);
static ALvoid ALnullState_update(ALnullState *state, const ALCcontext *context, const ALeffectslot *slot, const ALeffectProps *props);
/* NOTE(review): "mumChannels" is a typo for "numChannels"; harmless in a
 * declaration since only the type matters. */
static ALvoid ALnullState_process(ALnullState *state, ALsizei samplesToDo, const ALfloat (*restrict samplesIn)[BUFFERSIZE], ALfloat (*restrict samplesOut)[BUFFERSIZE], ALsizei mumChannels);
static void *ALnullState_New(size_t size);
static void ALnullState_Delete(void *ptr);

/* Define the ALeffectState vtable for this type. */
DEFINE_ALEFFECTSTATE_VTABLE(ALnullState);
/* This constructs the effect state. It's called when the object is first
 * created. Make sure to call the parent Construct function first, and set the
 * vtable!
 */
static void ALnullState_Construct(ALnullState *state)
{
    ALeffectState_Construct(STATIC_CAST(ALeffectState, state));
    SET_VTABLE2(ALnullState, ALeffectState, state);
}

/* This destructs (not free!) the effect state. It's called only when the
 * effect slot is no longer used. Make sure to call the parent Destruct
 * function before returning!
 */
static ALvoid ALnullState_Destruct(ALnullState *state)
{
    ALeffectState_Destruct(STATIC_CAST(ALeffectState,state));
}

/* This updates the device-dependant effect state. This is called on
 * initialization and any time the device parameters (eg. playback frequency,
 * format) have been changed. Always succeeds for the null effect.
 */
static ALboolean ALnullState_deviceUpdate(ALnullState* UNUSED(state), ALCdevice* UNUSED(device))
{
    return AL_TRUE;
}

/* This updates the effect state. This is called any time the effect is
 * (re)loaded into a slot. The null effect has no properties to apply.
 */
static ALvoid ALnullState_update(ALnullState* UNUSED(state), const ALCcontext* UNUSED(context), const ALeffectslot* UNUSED(slot), const ALeffectProps* UNUSED(props))
{
}

/* This processes the effect state, for the given number of samples from the
 * input to the output buffer. The result should be added to the output buffer,
 * not replace it. (ALfloatBUFFERSIZE is presumably a typedef'd sample-line
 * array type matching the (*restrict)[BUFFERSIZE] form in the forward
 * declaration above — confirm against alMain.h.)
 */
static ALvoid ALnullState_process(ALnullState* UNUSED(state), ALsizei UNUSED(samplesToDo), const ALfloatBUFFERSIZE*restrict UNUSED(samplesIn), ALfloatBUFFERSIZE*restrict UNUSED(samplesOut), ALsizei UNUSED(numChannels))
{
}

/* This allocates memory to store the object, before it gets constructed.
 * DECLARE_DEFAULT_ALLOCATORS can be used to declare a default method.
 */
static void *ALnullState_New(size_t size)
{
    return al_malloc(16, size);
}

/* This frees the memory used by the object, after it has been destructed.
 * DECLARE_DEFAULT_ALLOCATORS can be used to declare a default method.
 */
static void ALnullState_Delete(void *ptr)
{
    al_free(ptr);
}
typedef struct NullStateFactory {
    DERIVE_FROM_TYPE(EffectStateFactory);
} NullStateFactory;

/* Creates ALeffectState objects of the appropriate type. */
ALeffectState *NullStateFactory_create(NullStateFactory *UNUSED(factory))
{
    ALnullState *state;

    NEW_OBJ0(state, ALnullState)();
    if(!state) return NULL;

    return STATIC_CAST(ALeffectState, state);
}

/* Define the EffectStateFactory vtable for this type. */
DEFINE_EFFECTSTATEFACTORY_VTABLE(NullStateFactory);

/* Returns the singleton factory used to create null effect states. */
EffectStateFactory *NullStateFactory_getFactory(void)
{
    static NullStateFactory NullFactory = { { GET_VTABLE2(NullStateFactory, EffectStateFactory) } };
    return STATIC_CAST(EffectStateFactory, &NullFactory);
}
/* The null effect exposes no properties, so every setter and getter below
 * reports AL_INVALID_ENUM for any property that's set or queried. */
void ALnull_setParami(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint UNUSED(val))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect integer property 0x%04x", param);
    }
}
void ALnull_setParamiv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALint* UNUSED(vals))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect integer-vector property 0x%04x", param);
    }
}
void ALnull_setParamf(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALfloat UNUSED(val))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect float property 0x%04x", param);
    }
}
void ALnull_setParamfv(ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, const ALfloat* UNUSED(vals))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect float-vector property 0x%04x", param);
    }
}

void ALnull_getParami(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint* UNUSED(val))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect integer property 0x%04x", param);
    }
}
void ALnull_getParamiv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALint* UNUSED(vals))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect integer-vector property 0x%04x", param);
    }
}
void ALnull_getParamf(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALfloat* UNUSED(val))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect float property 0x%04x", param);
    }
}
void ALnull_getParamfv(const ALeffect *UNUSED(effect), ALCcontext *context, ALenum param, ALfloat* UNUSED(vals))
{
    switch(param)
    {
        default:
            alSetError(context, AL_INVALID_ENUM, "Invalid null effect float-vector property 0x%04x", param);
    }
}

DEFINE_ALEFFECT_VTABLE(ALnull);

View file

@ -0,0 +1,79 @@
#include "config.h"
#include "alcmain.h"
#include "alcontext.h"
#include "almalloc.h"
#include "alspan.h"
#include "effects/base.h"
#include "effectslot.h"
namespace {

/* Effect state for the null (pass-nothing) effect; see the method
 * definitions below for the lifecycle details. */
struct NullState final : public EffectState {
    NullState();
    ~NullState() override;

    void deviceUpdate(const ALCdevice *device, const Buffer &buffer) override;
    void update(const ALCcontext *context, const EffectSlot *slot, const EffectProps *props,
        const EffectTarget target) override;
    void process(const size_t samplesToDo, const al::span<const FloatBufferLine> samplesIn,
        const al::span<FloatBufferLine> samplesOut) override;

    DEF_NEWDEL(NullState)
};
/* Construction and destruction have nothing to set up or tear down. The
 * constructor runs when the object is first created; the destructor only
 * when the effect instance is no longer used.
 */
NullState::NullState() = default;
NullState::~NullState() = default;

/* Device-dependant state update. Called on state initialization and whenever
 * the device parameters (e.g. playback frequency, format) change; on success
 * it's always followed by a call to update(). Nothing to do here.
 */
void NullState::deviceUpdate(const ALCdevice*, const Buffer&)
{
}

/* Property update, called any time the effect is (re)loaded into a slot. The
 * null effect has no properties to apply.
 */
void NullState::update(const ALCcontext*, const EffectSlot*, const EffectProps*,
    const EffectTarget)
{
}

/* Sample processing. The result should be added to the output buffer, not
 * replace it — and the null effect adds nothing.
 */
void NullState::process(const size_t, const al::span<const FloatBufferLine>,
    const al::span<FloatBufferLine>)
{
}
/* Creates EffectState objects of the appropriate type. */
struct NullStateFactory final : public EffectStateFactory {
    al::intrusive_ptr<EffectState> create() override
    { return al::intrusive_ptr<EffectState>{new NullState{}}; }
};

} // namespace

/* Returns the shared, stateless factory for null effect states. */
EffectStateFactory *NullStateFactory_getFactory()
{
    static NullStateFactory factory{};
    return &factory;
}

Some files were not shown because too many files have changed in this diff Show more