Device: Refactor A2B APIs

* Removes legacy features from the `A2BMessage` class and adds an API for reading 16, 24, and 32 bit samples from A2B channels
* Reorganizes the WAV receiving and transmitting code and API
* Adds an API for mapping message channels to WAV channels (and vice versa) for transmitting and receiving
* Fixes `icsneo::Network::NetID::ExtendedData` VnetID bug for `icsneo::ExtendedDataMessage` decoding
* Creates a RAD-A2B sequence chart example
* Fixes CoreMini uploading to EEPROM for certain devices by introducing `icsneo::Device::supportsEraseMemory`
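
For reference, a rough sketch of the new channel-mapping API on both the transmit and receive paths, pieced together from the examples in this change (the opened device handle `rada2b` and the file names are placeholders):

```cpp
#include <icsneo/icsneocpp.h>
#include <icsneo/communication/message/callback/streamoutput/a2bwavoutput.h>
#include <chrono>
#include <cstdint>
#include <fstream>
#include <memory>
#include <thread>

void sketch(const std::shared_ptr<icsneo::Device>& rada2b) {
	// Transmit: map every message channel to channel 0 of the input WAV file
	std::ifstream wavFile("tone.wav", std::ios::binary);
	icsneo::IWAVStream wavStream(wavFile);
	icsneo::ChannelMap txMap;
	for(uint8_t ch = 0; ch < 8; ch++) // TDM4 -> 8 message channels (downstream + upstream)
		txMap[ch] = 0;
	auto msg = std::make_shared<icsneo::A2BMessage>(icsneo::A2BMessage::TDMMode::TDM4, true /* 16-bit channels */);
	msg->network = icsneo::Network(icsneo::Network::NetID::A2B2);
	msg->txmsg = true;
	if(msg->loadAudioBuffer(wavStream, txMap))
		rada2b->transmit(msg);

	// Receive: fill WAV channel 0 of "out.wav" from message channel 1 (channel 0 upstream)
	icsneo::ChannelMap rxMap = {{0, 1}};
	auto handler = rada2b->addMessageCallback(
		std::make_shared<icsneo::A2BWAVOutput>("out.wav", rxMap, icsneo::PCMType::L16, 1, 48000));
	std::this_thread::sleep_for(std::chrono::seconds(5));
	rada2b->removeMessageCallback(handler);
}
```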
pull/64/head
Yasser Yassine 2024-03-12 12:06:49 +00:00 committed by Kyle Schwarz
parent 06f6861130
commit cb22e622b3
33 changed files with 1014 additions and 947 deletions


@ -235,7 +235,7 @@ endforeach()
set(SRC_FILES
communication/message/flexray/control/flexraycontrolmessage.cpp
communication/message/callback/streamoutput/a2bwavoutput.cpp
communication/message/callback/streamoutput/a2bdecoder.cpp
communication/message/a2bmessage.cpp
communication/message/neomessage.cpp
communication/message/ethphymessage.cpp
communication/message/linmessage.cpp


@ -295,7 +295,6 @@ bool Decoder::decode(std::shared_ptr<Message>& result, const std::shared_ptr<Pac
case Network::NetID::ExtendedData: {
if(packet->data.size() < sizeof(ExtendedDataMessage::ExtendedDataHeader))
break;
const auto& header = *reinterpret_cast<ExtendedDataMessage::ExtendedDataHeader*>(packet->data.data());
switch(header.subCommand) {
@ -307,6 +306,8 @@ bool Decoder::decode(std::shared_ptr<Message>& result, const std::shared_ptr<Pac
extDataMsg->data.resize(numRead);
std::copy(packet->data.begin() + sizeof(header), packet->data.begin() + sizeof(header) + numRead, extDataMsg->data.begin());
extDataMsg->network = Network(static_cast<uint16_t>(Network::NetID::ExtendedData), false);
return true;
}
default:


@ -0,0 +1,258 @@
#include "icsneo/communication/message/a2bmessage.h"
#include "icsneo/communication/message/callback/streamoutput/streamoutput.h"
using namespace icsneo;
// Read a 16 bit sample from the audio buffer, which is stored as little endian
#define SAMPLE_FROM_BYTES_16(audioData) (((audioData)[0]) | ((audioData)[1] << 8))
// Read a 32 bit sample from the audio buffer
#define SAMPLE_FROM_BYTES_32(audioData) (((audioData)[0]) | ((audioData)[1] << 8) | ((audioData)[2] << 16) | ((audioData)[3] << 24))
// Write the two most significant bytes of a sample stored in a 32 bit unsigned integer into audioData (little endian)
#define SAMPLE_TO_BYTES_16(audioData, offset, sample) {\
(audioData)[(offset)++] = static_cast<uint8_t>(((sample) & 0x00FF0000u) >> 16);\
(audioData)[(offset)++] = static_cast<uint8_t>(((sample) & 0xFF000000u) >> 24);\
}
// Write a 32 bit unsigned integer into audioData as little endian
#define SAMPLE_TO_BYTES_32(audioData, offset, sample) {\
(audioData)[(offset)++] = static_cast<uint8_t>(((sample) & 0x000000FFu));\
(audioData)[(offset)++] = static_cast<uint8_t>(((sample) & 0x0000FF00u) >> 8);\
(audioData)[(offset)++] = static_cast<uint8_t>(((sample) & 0x00FF0000u) >> 16);\
(audioData)[(offset)++] = static_cast<uint8_t>(((sample) & 0xFF000000u) >> 24);\
}
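// For example, reading the little-endian bytes {0x34, 0x12} with SAMPLE_FROM_BYTES_16
// yields 0x1234, and writing the sample 0x12345678 with SAMPLE_TO_BYTES_16 stores
// {0x34, 0x12} (its two most significant bytes), while SAMPLE_TO_BYTES_32 stores
// {0x78, 0x56, 0x34, 0x12}.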
uint8_t A2BMessage::tdmToChannelNum(TDMMode tdm) {
switch(tdm) {
case TDMMode::TDM2:
return 4;
case TDMMode::TDM4:
return 8;
case TDMMode::TDM8:
return 16;
case TDMMode::TDM12:
return 24;
case TDMMode::TDM16:
return 32;
case TDMMode::TDM20:
return 40;
case TDMMode::TDM24:
return 48;
case TDMMode::TDM32:
return 64;
}
return 0;
}
uint8_t A2BMessage::getBytesPerChannel() const {
return channelSize16 ? 2u : 4u;
}
size_t A2BMessage::getFrameSize() const {
return static_cast<size_t>(2 * numChannels * getBytesPerChannel());
}
size_t A2BMessage::getSampleOffset(Direction dir, uint8_t channel, size_t frame) const {
size_t frameSize = getFrameSize();
size_t sampleOffset = static_cast<size_t>(frameSize * frame + 2 * channel * getBytesPerChannel());
if(dir == Direction::Upstream) {
// Upstream samples immediately follow their downstream counterparts, one channel width apart
sampleOffset += getBytesPerChannel();
}
return sampleOffset;
}
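// Worked example: for TDM4 with 16-bit channels (numChannels = 4, 2 bytes per channel,
// 16-byte frames), the sample for upstream channel 1 in frame 2 lives at
// 16 * 2 + 2 * 1 * 2 + 2 = 38 bytes into the audio buffer.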
size_t A2BMessage::getNumFrames() const {
size_t frameSize = getFrameSize();
if(frameSize == 0) {
return 0;
}
return data.size() / frameSize;
}
A2BMessage::A2BMessage(size_t numFrames, TDMMode tdm, bool chSize16) : channelSize16(chSize16) {
numChannels = static_cast<uint8_t>(tdmToChannelNum(tdm) / 2);
size_t frameSize = static_cast<size_t>(2 * numChannels * (chSize16 ? 2u : 4u));
size_t audioBufferSize = frameSize * numFrames;
if(audioBufferSize > maxAudioBufferSize) {
size_t maxNumFrames = maxAudioBufferSize / frameSize;
audioBufferSize = maxNumFrames * frameSize;
}
data.resize(std::min<size_t>(maxAudioBufferSize, audioBufferSize), 0);
}
A2BMessage::A2BMessage(TDMMode tdm, bool chSize16) : channelSize16(chSize16) {
numChannels = static_cast<uint8_t>(tdmToChannelNum(tdm) / 2);
size_t frameSize = static_cast<size_t>(2 * numChannels * (chSize16 ? 2u : 4u));
size_t maxNumFrames = maxAudioBufferSize / frameSize;
size_t audioBufferSize = maxNumFrames * frameSize;
data.resize(audioBufferSize, 0);
}
PCMSample A2BMessage::getChannelSample(Direction dir, uint8_t channel, size_t frame, PCMType pcmType) const {
size_t sampleOffset = getSampleOffset(dir, channel, frame);
const uint8_t* audioData = &data[sampleOffset];
PCMSample result = 0;
// Samples coming from the device will either come from a 16 bit channel or 32 bit channel
if(channelSize16) {
int16_t sample16 = 0;
uint16_t& uSample16 = *reinterpret_cast<uint16_t*>(&sample16);
// Read little endian from the audio buffer
uSample16 = SAMPLE_FROM_BYTES_16(audioData);
// Scale the sample up according to the desired PCM size by
// multiplying using logical shifting
switch(pcmType) {
case PCMType::L16:
result = static_cast<PCMSample>(sample16);
break;
case PCMType::L24:
result = static_cast<PCMSample>(sample16) << 8;
break;
case PCMType::L32:
result = static_cast<PCMSample>(sample16) << 16;
break;
}
} else {
PCMSample sample32 = 0;
uint32_t& uSample32 = *reinterpret_cast<uint32_t*>(&sample32);
// Read little endian
uSample32 = SAMPLE_FROM_BYTES_32(audioData);
// Scale the sample down according to the desired PCM size by dividing using
// logical shifting, if the A2B network was set up with the desired pcmType
// there should be a clean division and no loss in PCM resolution.
switch(pcmType) {
case PCMType::L16:
result = sample32 >> 16;
break;
case PCMType::L24:
result = sample32 >> 8;
break;
case PCMType::L32:
result = sample32;
break;
}
}
return result;
}
void A2BMessage::setChannelSample(Direction dir, uint8_t channel, size_t frame, PCMSample sampleToSet, PCMType pcmType) {
size_t sampleOffset = getSampleOffset(dir, channel, frame);
uint8_t* audioData = data.data();
uint32_t& uSample = *reinterpret_cast<uint32_t*>(&sampleToSet);
// Align the bytes towards the most significant bit by multiplying using
// left shifts
switch(pcmType) {
case PCMType::L16:
sampleToSet = sampleToSet << 16;
break;
case PCMType::L24:
sampleToSet = sampleToSet << 8;
break;
}
if(channelSize16) {
// Write the 2 most significant bytes of the sample
SAMPLE_TO_BYTES_16(audioData, sampleOffset, uSample)
} else {
// Write the entire sample
SAMPLE_TO_BYTES_32(audioData, sampleOffset, uSample);
}
}
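// Illustrative usage (assuming a TDM4 message with 16-bit channels): writing the L24
// sample 0x123456 stores only its two most significant bytes (0x1234) in the channel,
// so reading it back as L24 yields 0x123400.
//   A2BMessage msg(A2BMessage::TDMMode::TDM4, true);
//   msg.setChannelSample(A2BMessage::Direction::Downstream, 0, 0, 0x123456, PCMType::L24);
//   PCMSample s = msg.getChannelSample(A2BMessage::Direction::Downstream, 0, 0, PCMType::L24); // 0x123400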
bool A2BMessage::loadAudioBuffer(IWAVStream& wavStream, const ChannelMap& channelMap) {
if(!wavStream) {
return false;
}
size_t totalMessageChannels = numChannels * 2; // Multiply by two in order to include both downstream and upstream channels
size_t bytesPerChannel = static_cast<size_t>(getBytesPerChannel()); // Number of bytes per message channel
size_t frameSize = getFrameSize();
size_t numFrames = getNumFrames();
size_t bytesPerSampleWAV = static_cast<size_t>(wavStream.header.bitsPerSample / 8); // Number of bytes per sample in the WAV data-stream
size_t numWAVChannels = static_cast<size_t>(wavStream.header.numChannels);
size_t wavFrameSize = numWAVChannels * bytesPerSampleWAV;
if(bytesPerSampleWAV != 2 && bytesPerSampleWAV != 3 && bytesPerSampleWAV != 4) {
return false;
}
if(numFrames == 0) {
return false;
}
uint8_t* audioBuffer = data.data();
std::vector<uint8_t> wavFrame(wavFrameSize, 0);
for(size_t frame = 0; frame < numFrames; frame++) {
// Read one frame of data from the input stream
if(!wavStream.read(reinterpret_cast<char*>(wavFrame.data()), wavFrame.size())) {
break;
}
// Iterate through each mapping and set a message channel to a channel in the WAV frame above
for(const auto& [messageChannel, wavChannel] : channelMap) {
if(messageChannel >= totalMessageChannels || wavChannel >= numWAVChannels) {
return false;
}
size_t frameOffset = wavChannel * bytesPerSampleWAV; // Offset in the read WAV frame
size_t audioBufferOffset = frame * frameSize + messageChannel * bytesPerChannel; // Offset in the message audio buffer
if(bytesPerChannel < bytesPerSampleWAV) {
// The message channel is narrower than the WAV sample. Samples in both are little
// endian, so copy only the most significant bytes of the WAV sample.
// Align to the most significant bytes of the WAV frame
size_t align = bytesPerSampleWAV - bytesPerChannel;
for(
size_t frameByte = frameOffset + align;
frameByte < frameOffset + bytesPerSampleWAV;
frameByte++,
audioBufferOffset++
) {
audioBuffer[audioBufferOffset] = wavFrame[frameByte];
}
} else {
// The message channel is at least as wide as the WAV sample. I2S data is
// MSB-aligned, so the WAV sample is placed in the most significant bytes of the channel
// Align to the most significant bytes of the audio buffer channel
size_t align = bytesPerChannel - bytesPerSampleWAV;
for(
size_t audioByte = audioBufferOffset + align;
audioByte < audioBufferOffset + bytesPerChannel;
audioByte++,
frameOffset++
) {
audioBuffer[audioByte] = wavFrame[frameOffset];
}
}
}
}
return true;
}
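// Worked example: loading a 24-bit (3 bytes/sample) WAV into 16-bit (2 byte) message
// channels copies only the top 2 of each sample's 3 bytes; loading a 16-bit WAV into
// 32-bit channels writes the 2 WAV bytes into the upper half of each 4-byte channel,
// leaving the lower 2 bytes at their initial zero value.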


@ -1,156 +0,0 @@
#include "icsneo/communication/message/callback/streamoutput/a2bdecoder.h"
#include <chrono>
#include "icsneo/icsneocpp.h"
namespace icsneo {
static constexpr uint8_t maxChannel = 255;
size_t A2BAudioChannelMap::getChannelIndex(Channel channel, A2BMessage::A2BDirection dir) const {
size_t output = (size_t)channel;
if(dir == A2BMessage::A2BDirection::Upstream) {
output++;
}
return output;
}
A2BAudioChannelMap::A2BAudioChannelMap(uint8_t tdm) {
rawMap.resize(2*tdm, maxChannel);
}
void A2BAudioChannelMap::set(Channel outChannel, A2BMessage::A2BDirection dir, Channel inChannel) {
auto index = getChannelIndex(outChannel, dir);
rawMap[index] = inChannel;
}
void A2BAudioChannelMap::setAll(Channel inChannel) {
std::fill(rawMap.begin(), rawMap.end(), inChannel);
}
Channel A2BAudioChannelMap::get(Channel outChannel, A2BMessage::A2BDirection dir) const {
auto index = getChannelIndex(outChannel, dir);
return rawMap[index];
}
size_t A2BAudioChannelMap::A2BAudioChannelMap::size() const {
return rawMap.size();
}
uint8_t A2BAudioChannelMap::getTDM() const {
return (uint8_t)(rawMap.size() / 2);
}
Channel& A2BAudioChannelMap::operator[](size_t idx) {
return rawMap[idx];
}
A2BAudioChannelMap::operator const std::vector<Channel>&() const {
return rawMap;
}
A2BDecoder::A2BDecoder(
std::unique_ptr<std::istream>&& streamOut,
bool chSize16,
const A2BAudioChannelMap& chMap
) : channelSize16(chSize16), channelMap(chMap) {
stream = std::move(streamOut);
tdm = chMap.getTDM();
initializeFromHeader();
}
A2BDecoder::A2BDecoder(
const char* filename,
bool chSize16,
const A2BAudioChannelMap& chMap
) : A2BDecoder(std::make_unique<std::ifstream>(filename, std::ios::binary), chSize16, chMap) { }
A2BDecoder::operator bool() const {
return initialized && stream->good() && !stream->eof();
}
void A2BDecoder::initializeFromHeader() {
WaveFileHeader header;
if(!stream->read((char*)&header, sizeof(header))) {
initialized = false;
return;
}
// Only allow 16 or 24 bit samples
if(header.bitsPerSample != 16 && header.bitsPerSample != 24) {
initialized = false;
return;
}
audioBytesPerSample = header.bitsPerSample == 16 ? 2 : 3;
channelsInWave = (uint8_t)header.numChannels;
size_t bytesPerSample = channelSize16 ? 2 : 4;
size_t frameSize = 2*tdm*bytesPerSample;
size_t frameSizeWave = (size_t)(channelsInWave) * (size_t)(audioBytesPerSample);
frame.resize(frameSize, 0);
frameWave.resize(frameSizeWave, 0);
initialized = true;
}
std::shared_ptr<A2BMessage> A2BDecoder::decode() {
if(!*(this)) {
return nullptr;
}
auto a2bMessagePtr = std::make_shared<icsneo::A2BMessage>(
tdm,
channelSize16,
2048
);
A2BMessage& a2bMessage = *a2bMessagePtr.get();
a2bMessage.setMonitorBit(false); // Probably not necessary
a2bMessage.setTxMsgBit(true);
a2bMessage.network = Network(Network::NetID::A2B2);
for(uint32_t frameIndex = 0; frameIndex < a2bMessage.getNumFrames(); frameIndex++) {
if(!stream->read((char*)frameWave.data(), frameWave.size())) {
break;
}
for(size_t icsChannel = 0; icsChannel < channelMap.size(); icsChannel++) {
if(channelMap[icsChannel] >= maxChannel) {
continue;
}
size_t wBegin = audioBytesPerSample * channelMap[icsChannel];
A2BPCMSample sample = 0;
uint8_t* sampBytes = (uint8_t*)&sample;
std::copy(frameWave.begin() + wBegin, frameWave.begin() + wBegin + audioBytesPerSample, sampBytes);
a2bMessage[frameIndex][icsChannel] = sample;
}
}
return a2bMessagePtr;
}
bool A2BDecoder::outputAll(std::shared_ptr<Device>& device) {
const auto& networks = device->getSupportedTXNetworks();
if(std::none_of(networks.begin(), networks.end(), [](const Network& net) { return net.getNetID() == Network::NetID::A2B2; })) {
return false;
}
while(*this) {
device->transmit(decode());
}
return true;
}
}


@ -4,15 +4,99 @@
namespace icsneo {
void A2BWAVOutput::writeHeader(const std::shared_ptr<A2BMessage>& firstMsg) const {
A2BWAVOutput::A2BWAVOutput(
const char* filename,
const ChannelMap& channelMap,
PCMType bitDepth,
size_t numWAVChannels,
uint32_t sampleRate
)
: StreamOutput(filename), chMap(channelMap), wavSampleRate(sampleRate), numChannelsWAV(numWAVChannels) {
switch(bitDepth) {
case PCMType::L16:
bytesPerSampleWAV = 2;
break;
case PCMType::L24:
bytesPerSampleWAV = 3;
break;
case PCMType::L32:
bytesPerSampleWAV = 4;
break;
}
if(initialize()) {
initialized = true;
}
}
A2BWAVOutput::A2BWAVOutput(
std::ostream& os,
const ChannelMap& channelMap,
PCMType bitDepth,
size_t numWAVChannels,
uint32_t sampleRate
)
: StreamOutput(os), chMap(channelMap), wavSampleRate(sampleRate), numChannelsWAV(numWAVChannels) {
switch(bitDepth) {
case PCMType::L16:
bytesPerSampleWAV = 2;
break;
case PCMType::L24:
bytesPerSampleWAV = 3;
break;
case PCMType::L32:
bytesPerSampleWAV = 4;
break;
}
if(initialize()) {
initialized = true;
}
}
A2BWAVOutput::~A2BWAVOutput() {
if(!closed) {
close();
}
}
bool A2BWAVOutput::initialize() {
static constexpr size_t maxWAVChannels = 256;
if(numChannelsWAV > maxWAVChannels) {
return false;
}
maxMessageChannel = 0;
// Check whether the provided channel map has invalid mappings and compute maxMessageChannel
for(auto [wavChannel, messageChannel] : chMap) {
maxMessageChannel = std::max<size_t>(maxMessageChannel, messageChannel);
if(wavChannel >= numChannelsWAV) {
return false;
}
}
WAVHeader header = WAVHeader(
static_cast<uint16_t>(chMap.size()),
wavSampleRate,
static_cast<uint16_t>(bytesPerSampleWAV * 8)
);
if(!stream->write(reinterpret_cast<const char*>(&header), sizeof(WAVHeader))) {
return false;
}
WaveFileHeader header = WaveFileHeader(2 * firstMsg->getNumChannels(), wavSampleRate, firstMsg->getBitDepth());
header.write(stream);
streamStartPos = static_cast<uint32_t>(stream->tellp());
wavBuffer = std::vector<uint8_t>(wavBufferSize, 0);
wavBufferOffset = 0;
return true;
}
bool A2BWAVOutput::callIfMatch(const std::shared_ptr<Message>& message) const {
if(!initialized) {
return false;
}
if(closed) {
return false;
@ -22,28 +106,87 @@ bool A2BWAVOutput::callIfMatch(const std::shared_ptr<Message>& message) const {
return false;
}
const auto& frame = std::static_pointer_cast<Frame>(message);
const auto& frameMsg = std::dynamic_pointer_cast<Frame>(message);
if(frame->network.getType() != Network::Type::A2B)
if(!frameMsg) {
return false;
}
if(frameMsg->network.getType() != Network::Type::A2B)
return false;
const auto& a2bmsg = std::static_pointer_cast<A2BMessage>(frame);
const auto& a2bMsg = std::dynamic_pointer_cast<A2BMessage>(frameMsg);
if(firstMessageFlag) {
writeHeader(a2bmsg);
firstMessageFlag = false;
if(!a2bMsg) {
return false;
}
// Might need to readd this block of code later if sample alignment fix is necessary
/*
std::streamsize bps = (std::streamsize)a2bmsg->getBytesPerSample();
for(size_t i=0; i<a2bmsg->getNumSamples(); i++) {
A2BPCMSample samp = *(a2bmsg->getSample(i));
write((void*)&samp, bps);
}
*/
size_t frameSize = a2bMsg->getFrameSize();
size_t wavFrameSize = numChannelsWAV * bytesPerSampleWAV;
size_t bytesPerChannel = static_cast<size_t>(a2bMsg->getBytesPerChannel());
size_t numMessageChannels = 2 * a2bMsg->numChannels;
size_t numFrames = a2bMsg->getNumFrames();
write((void*)a2bmsg->getAudioBuffer(), a2bmsg->getAudioBufferSize());
const uint8_t* audioBuffer = a2bMsg->data.data();
if(maxMessageChannel >= numMessageChannels) {
// The largest message channel in the channel map is out of range for this message,
// likely because the user supplied incorrect settings
return false;
}
for(size_t frame = 0; frame < numFrames; frame++) {
// Check whether another frame fits in wavBuffer; if not, write out and clear the buffer
if(wavBufferOffset + wavFrameSize >= wavBufferSize) {
if(!writeCurrentBuffer()) {
return false;
}
}
for(size_t wavChannel = 0; wavChannel < numChannelsWAV; wavChannel++) {
if(auto iter = chMap.find(static_cast<uint8_t>(wavChannel)); iter != chMap.end()) {
auto messageChannel = iter->second;
size_t messageChannelOffset = messageChannel * bytesPerChannel + frameSize * frame;
// Samples in the WAV are little endian signed integers
// Samples in the message channels are little endian signed integers that are
// most significant bit aligned
if(a2bMsg->channelSize16) {
// In this case, the channel size will be less than or equal to the sample we are writing
// so we zero out any of the least significant bytes which won't be occupied by a sample byte
for(size_t zeroByte = 0; zeroByte < bytesPerSampleWAV - bytesPerChannel; zeroByte++) {
wavBuffer[wavBufferOffset++] = 0;
}
// Write the channel data into the most significant bytes of the WAV sample; this effectively
// writes a sample which is scaled up.
for(size_t channelByte = 0; channelByte < bytesPerChannel; channelByte++) {
wavBuffer[wavBufferOffset++] = audioBuffer[messageChannelOffset + channelByte];
}
} else {
// In this case, the channel size will be greater than or equal to the sample we are reading
// Align the wav sample with the most significant bytes of the channel
size_t channelByte = messageChannelOffset + (bytesPerChannel - bytesPerSampleWAV);
// Read the most significant bytes of the channel into the wavBuffer
for(size_t sampleByte = 0; sampleByte < bytesPerSampleWAV; sampleByte++, channelByte++) {
wavBuffer[wavBufferOffset++] = audioBuffer[channelByte];
}
}
} else {
// If this channel wasn't specified in the channel map, set a zero sample
for(
size_t sampleByte = 0;
sampleByte < bytesPerSampleWAV;
sampleByte++
) {
wavBuffer[wavBufferOffset++] = 0;
}
}
}
}
return true;
}
@ -53,17 +196,39 @@ void A2BWAVOutput::close() const {
return;
}
if(!initialized) {
return;
}
// Write any left over data in the buffer
if(wavBufferOffset > 0) {
writeCurrentBuffer();
}
// Seek back in the output stream and write the WAV chunk sizes
uint32_t streamEndPos = static_cast<uint32_t>(stream->tellp());
uint32_t subChunk2Size = streamEndPos - streamStartPos;
uint32_t chunkSize = streamEndPos - 8;
stream->seekp(streamStartPos - 4);
write((void*)&subChunk2Size, 4);
stream->write(reinterpret_cast<const char*>(&subChunk2Size), 4);
stream->seekp(4, std::ios::beg);
write((void*)&chunkSize, 4);
stream->write(reinterpret_cast<const char*>(&chunkSize), 4);
closed = true;
}
bool A2BWAVOutput::writeCurrentBuffer() const {
if(!stream->write(reinterpret_cast<const char*>(wavBuffer.data()), wavBufferOffset)) {
return false;
}
wavBufferOffset = 0;
return true;
}
}


@ -17,41 +17,38 @@ std::shared_ptr<Message> HardwareA2BPacket::DecodeToMessage(const std::vector<ui
size_t totalPackedLength = static_cast<size_t>(bytestream.size()) - sizeof(HardwareA2BPacket); // First 28 bytes are message header.
std::shared_ptr<A2BMessage> msg = std::make_shared<A2BMessage>(
(uint8_t)data->header.channelNum,
data->header.channelSize16,
totalPackedLength
);
if(totalPackedLength == 0) {
return nullptr;
}
msg->setMonitorBit(data->header.monitor);
msg->setTxMsgBit(data->header.txmsg);
msg->setErrIndicatorBit(data->header.errIndicator);
msg->setSyncFrameBit(data->header.syncFrame);
msg->setRFU2(data->header.rfu2);
std::shared_ptr<A2BMessage> msg = std::make_shared<A2BMessage>();
msg->numChannels = data->header.channelNum;
msg->channelSize16 = data->header.channelSize16;
msg->monitor = data->header.monitor;
msg->txmsg = data->header.txmsg;
msg->errIndicator = data->header.errIndicator;
msg->syncFrame = data->header.syncFrame;
msg->rfu2 = data->header.rfu2;
msg->timestamp = data->timestamp.TS;
msg->setAudioBuffer(bytestream.begin() + sizeof(HardwareA2BPacket), bytestream.end());
msg->data = std::vector(bytestream.begin() + sizeof(HardwareA2BPacket), bytestream.end());
return msg;
}
bool HardwareA2BPacket::EncodeFromMessage(const A2BMessage& message, std::vector<uint8_t>& bytestream, const device_eventhandler_t& report) {
bool HardwareA2BPacket::EncodeFromMessage(const A2BMessage& message, std::vector<uint8_t>& bytestream, const device_eventhandler_t& /*report*/) {
constexpr size_t a2btxMessageHeaderSize = 6;
if(message.getBytesPerSample() != 2 && message.getBytesPerSample() != 4) {
report(APIEvent::Type::MessageFormattingError, APIEvent::Severity::Error);
return false;
}
size_t sampleBytes = message.getAudioBufferSize();
size_t totalSize = a2btxMessageHeaderSize + sampleBytes;
size_t audioBufferSize = message.data.size();
size_t totalSize = a2btxMessageHeaderSize + audioBufferSize;
bytestream.resize(totalSize, 0);
uint32_t offset = 0;
bytestream[offset++] = 0;
bytestream[offset++] = 0;
bytestream[offset++] = (uint8_t)(sampleBytes & 0xFF);
bytestream[offset++] = (uint8_t)((sampleBytes >> 8) & 0xFF);
bytestream[offset++] = (uint8_t)(audioBufferSize & 0xFF);
bytestream[offset++] = (uint8_t)((audioBufferSize >> 8) & 0xFF);
bytestream[offset++] = (uint8_t)((message.description >> 8) & 0xFF);
bytestream[offset++] = (uint8_t)(message.description & 0xFF);


@ -490,7 +490,7 @@ bool Device::startScript(Disk::MemoryType memType)
const auto response = com->waitForMessageSync([&]() {
return com->sendCommand(Command::LoadCoreMini, location);
}, filter);
}, filter, std::chrono::milliseconds(2000));
if(!response) {
report(APIEvent::Type::NoDeviceResponse, APIEvent::Severity::Error);
@ -522,14 +522,14 @@ bool Device::stopScript()
return true;
}
bool Device::uploadCoremini(std::unique_ptr<std::istream>&& stream, Disk::MemoryType memType) {
bool Device::uploadCoremini(std::istream& stream, Disk::MemoryType memType) {
if(!stream || stream->bad()) {
if(stream.bad()) {
report(APIEvent::Type::RequiredParameterNull, APIEvent::Severity::Error);
return false;
}
std::vector<char> bin(std::istreambuf_iterator<char>(*stream), {}); // Read the whole stream
std::vector<char> bin(std::istreambuf_iterator<char>(stream), {}); // Read the whole stream
if(bin.size() < 4) {
report(APIEvent::Type::BufferInsufficient, APIEvent::Severity::Error);
@ -596,6 +596,10 @@ bool Device::uploadCoremini(std::unique_ptr<std::istream>&& stream, Disk::Memory
bool Device::eraseScriptMemory(Disk::MemoryType memType, uint64_t amount) {
static std::shared_ptr<MessageFilter> NeoEraseDone = std::make_shared<MessageFilter>(Network::NetID::NeoMemoryWriteDone);
if(!supportsEraseMemory()) {
return true;
}
auto startAddress = getCoreminiStartAddress(memType);
if(!startAddress) {
return false;
@ -612,7 +616,7 @@ bool Device::eraseScriptMemory(Disk::MemoryType memType, uint64_t amount) {
arguments[0] = static_cast<uint8_t>(memType);
*reinterpret_cast<uint32_t*>(&arguments[1]) = static_cast<uint32_t>(*startAddress / 512);
*reinterpret_cast<uint32_t*>(&arguments[5])= numWords;
*reinterpret_cast<uint32_t*>(&arguments[5]) = numWords;
auto msg = com->waitForMessageSync([this, &arguments] {
return com->sendCommand(Command::NeoEraseMemory, arguments);


@ -101,17 +101,23 @@ The write blocking status of the device determines the behavior of attempting to
If write blocking is enabled, then the transmitting thread will wait for the entire buffer to be transmitted.
If write blocking is disabled, then the attempt to transmit will simply fail and an error will be logged on the calling thread.
A2B Wave Output
~~~~~~~~~~~~~~~~~~~~
Users may add a ``icsneo::A2BWAVOutput`` message callback to their device in order to write A2B PCM data to a WAVE file. The message callback listens for ``icsneo::A2BMessage``
messages and writes both downstream and upstream channels to a single wave file. If downstream and upstream each have ``32`` channels, the wave file will contain ``2*32 = 64``
total channels. Channels are indexed at 0 and interleaved such that downstream are on even number channels and upstream on odd number channels. If we introduce a
variable ``IS_UPSTREAM`` which is ``0`` when downstream and ``1`` when upstream and desired a channel ``CHANNEL_NUM`` the corresponding channel in the wave file would be
``2*CHANNEL_NUM + IS_UPSTREAM``.
A2B message channel indexing
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The libicsneo API allows users to input and output WAV files by transmitting or receiving A2B messages. The library provides several ways to import and export
WAV files via A2B traffic. While using the API, a user will encounter message channels being referenced as unsigned integers for indexing. An ``icsneo::A2BMessage``
object contains both upstream and downstream channels, so the number of channels an A2B message has is twice the TDM mode. These channels are
interleaved: message channel index ``0`` represents downstream channel ``0`` in the network, message channel index ``1`` represents
upstream channel ``0`` in the network, and so on. More generally, a message channel index can be computed with the formula ``2 * CHANNEL + IS_UPSTREAM``, where ``CHANNEL``
is the channel number in the A2B network and ``IS_UPSTREAM`` is ``1`` when the channel is upstream and ``0`` when it is downstream.
Wave files may be split by channel using programs such as ``FFmpeg``. Consider a file ``out.wav`` which was generated using a ``icsneo::A2BWAVOutput`` object
and contains ``32`` channels per stream. The ``icsneo::A2BWavoutput`` object injested PCM data with a sample rate of ``44.1 kHz`` and bit depth of ``24``. The corresponding
channel of upstream channel ``8`` in ``out.wav`` would be ``2*CHANNEL_NUM + IS_UPSTREAM = 2*8 + 1 = 17``. The following ``FFmpeg`` command may be ran in a linux environment to create a new wave
file ``out_upstream_ch8.wav`` which contains only PCM samples off of upstream channel ``8``.
One place these message channel indices are used is when specifying an ``icsneo::ChannelMap`` for WAV transmit or receive. When transmitting a WAV file, the
map maps message channels to channels in the input WAV file. When receiving into a WAV file, the map maps output WAV channels to channels from received messages.
``ffmpeg -i out.wav -ar 44100 -acodec pcm_s24le -map_channel 0.0.17 out_upstream_ch8.wav``
For example, to transmit we need to construct a mapping from message channels to channels in the input WAV file. So, if we are feeding a mono (single-channel) WAV file
through the API, we can map channel ``2`` upstream to channel ``0`` in the input WAV (the only channel in the WAV file) with the following, used in the sketch below:
``icsneo::ChannelMap chMap;``
``chMap[5] = 0;``
Since we are transmitting, we must map our desired A2B message channels to channels in the input WAV file. Here ``0`` is the single channel in the WAV file and
``5`` is channel ``2`` upstream in the A2B message, from the formula above: ``2 * CHANNEL + IS_UPSTREAM = 2 * 2 + 1 = 5``.
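
A minimal transmit sketch using this mapping might look like the following (``device`` is an already-open device handle and ``input.wav`` is a placeholder file name; the RAD-A2B is assumed to be configured for TDM4 with 16 bit channels)::

    std::ifstream wavFile("input.wav", std::ios::binary);
    icsneo::IWAVStream wavStream(wavFile);

    icsneo::ChannelMap chMap;
    chMap[5] = 0; // message channel 5 (channel 2 upstream) <- WAV channel 0

    while(wavStream) {
        auto msg = std::make_shared<icsneo::A2BMessage>(icsneo::A2BMessage::TDMMode::TDM4, true);
        msg->network = icsneo::Network(icsneo::Network::NetID::A2B2);
        msg->txmsg = true;
        if(!msg->loadAudioBuffer(wavStream, chMap))
            break;
        device->transmit(msg);
    }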


@ -1,111 +1,144 @@
// libicsneo A2B example
// Example must be ran with rada2b as slave on TDM4 32 bit channel size and one ADI master node
// Options:
// -h, --help Display help message.
// -e, --example [EXAMPLE_NUM] Example to run.
// Example usage: ./libicsneocpp-a2b.exe --example 1
// Example usage: ./libicsneocpp-a2b.exe -h
/**
* libicsneo A2B example
*
* Examples are meant to be run with a RAD-A2B as the main node on TDM4 with 16 bit channel size and one additional sub node (either an ADI board or an additional RAD-A2B).
* Follow the specific hardware instructions for each example to ensure the expected output. Be sure to configure the A2B network before running these examples, especially
* those which transmit or receive audio.
*
* Options:
* -h, --help Display help message.
* -e, --example [EXAMPLE_NUM] Example to run.
* Example usage: ./libicsneocpp-a2b.exe --example 1
* Example usage: ./libicsneocpp-a2b.exe -h
*/
#include <iostream>
#include <fstream>
#include <icsneo/icsneocpp.h>
#include <icsneo/device/tree/rada2b/rada2bsettings.h>
#include <icsneo/communication/message/callback/streamoutput/a2bdecoder.h>
#include <icsneo/communication/message/callback/streamoutput/a2bwavoutput.h>
#include <string>
#include <math.h>
static constexpr size_t numFramesInWave = 48;
std::string makeWave() {
icsneo::WaveFileHeader header = icsneo::WaveFileHeader(1, 48000, 24);
std::vector<uint8_t> sineWaveSamples = {
0x00, 0x2B, 0x98, 0x08, 0x25, 0x01, 0x10, 0xD3, 0xEF, 0x18, 0x40, 0xA3, 0x20, 0x33, 0x4C, 0x26,
0xCE, 0xCA, 0x2D, 0x5B, 0x41, 0x32, 0xB9, 0x3C, 0x37, 0x6E, 0x50, 0x3B, 0x29, 0x18, 0x3D, 0xC2,
0x96, 0x3F, 0x86, 0xD3, 0x3F, 0xEC, 0x35, 0x3F, 0x85, 0xA7, 0x3D, 0xC4, 0x19, 0x3B, 0x28, 0xC9,
0x37, 0x6B, 0x5A, 0x32, 0xC1, 0xC4, 0x2D, 0x4A, 0xEE, 0x26, 0xE8, 0xB1, 0x20, 0x0E, 0xCF, 0x18,
0x6F, 0xAA, 0x10, 0x9C, 0x17, 0x08, 0x53, 0x35, 0x00, 0x01, 0xFD, 0xF7, 0xA9, 0x29, 0xEF, 0x66,
0x87, 0xE7, 0x8F, 0x37, 0xDF, 0xF0, 0x8A, 0xD9, 0x19, 0xB3, 0xD2, 0xB1, 0x3E, 0xCD, 0x42, 0xE9,
0xC8, 0x8F, 0xE0, 0xC4, 0xDB, 0x39, 0xC2, 0x39, 0x78, 0xC0, 0x7A, 0x8A, 0xC0, 0x16, 0x38, 0xC0,
0x74, 0x18, 0xC2, 0x44, 0xBF, 0xC4, 0xCE, 0x26, 0xC8, 0x9A, 0x99, 0xCD, 0x3F, 0x53, 0xD2, 0xA8,
0xBD, 0xD9, 0x32, 0xF5, 0xDF, 0xC2, 0x68, 0xE7, 0xD5, 0xFD, 0xEF, 0x02, 0x15, 0xF8, 0x3B, 0x33,
std::string makeWAV() {
icsneo::WAVHeader header = icsneo::WAVHeader(1, 48000, 16);
std::vector<uint8_t> sineWAVSamples = {
0xFF, 0x3F, 0x81, 0x5A, 0xD9, 0x6E, 0xA2, 0x7B, 0xFF, 0x7F, 0xA2, 0x7B, 0xD9, 0x6E, 0x81, 0x5A,
0xFF, 0x3F, 0x20, 0x21, 0x00, 0x00, 0xE0, 0xDE, 0x01, 0xC0, 0x7F, 0xA5, 0x27, 0x91, 0x5E, 0x84,
0x01, 0x80, 0x5E, 0x84, 0x27, 0x91, 0x7F, 0xA5, 0x01, 0xC0, 0xE0, 0xDe, 0x00, 0x00, 0x20, 0x21
};
std::vector<uint8_t> sineWave;
sineWave.reserve(sineWaveSamples.size() + sizeof(header));
std::vector<uint8_t> sineWAV;
sineWAV.reserve(sineWAVSamples.size() + sizeof(header));
sineWave.insert(sineWave.begin(), (uint8_t*)&header, (uint8_t*)(&header) + sizeof(header));
std::copy(sineWaveSamples.begin(), sineWaveSamples.end(), std::back_inserter(sineWave));
sineWAV.insert(sineWAV.begin(), (uint8_t*)&header, (uint8_t*)(&header) + sizeof(header));
std::copy(sineWAVSamples.begin(), sineWAVSamples.end(), std::back_inserter(sineWAV));
return std::string(sineWave.begin(), sineWave.end());
return std::string(sineWAV.begin(), sineWAV.end());
}
// Example 0: TX
void example0(std::shared_ptr<icsneo::Device>& rada2b) {
std::cout << "Transmitting a sine wave..." << std::endl;
// Create sine tone in wave format
std::string waveString = makeWave();
// Audio map to map which channel in wave to stream on a2b bus.
icsneo::A2BAudioChannelMap a2bmap(4);
a2bmap.set(
2, // Channel on a2b bus
icsneo::A2BMessage::A2BDirection::Downstream, // Direction
0 // Channel in wave file
);
a2bmap.setAll(0);
icsneo::A2BDecoder decoder(
std::make_unique<std::istringstream>(waveString), // Wave file stream
false, // True when using 16 bit samples
a2bmap
);
/**
* Example 0: TX
*/
void example0(const std::shared_ptr<icsneo::Device>& rada2b) {
std::cout << "Transmitting a sine tone..." << std::endl;
// Create sine tone in wav format
std::string wavString = makeWAV();
std::istringstream sineWAV(wavString);
double elapsedTime = 0.0;
// Create an IWAVStream object which represents a WAV data stream.
// The IWAVStream object here is initialized with an external std::istream,
// so it holds a non-owning reference to that stream.
icsneo::IWAVStream wavStream(sineWAV);
// Create a channel map which maps each message channel to a channel in the input WAV file
icsneo::ChannelMap channelMap;
// Here we will just set every message channel to channel 0 in the WAV file
// We have 8 channels since this is TDM4 and we include both upstream and downstream
// see docs for specific message channel labeling information
for(uint8_t messageChannel = 0; messageChannel < 8; messageChannel++) {
channelMap[messageChannel] = 0;
}
// Play roughly 5 seconds of sine tone.
while(elapsedTime < 5.0) {
while(elapsedTime < 5.0) {
// If WAVStream is invalid (at EOF) break out of loop
if(!wavStream) {
break;
}
decoder.outputAll(rada2b); // Output entire wave file
// Creates a new message with the maximum number of allocated frames
auto msg = std::make_shared<icsneo::A2BMessage>(
icsneo::A2BMessage::TDMMode::TDM4, /* TDM mode of the message, we use TDM4 for this whole example*/
true /* true if we want 16 bit channels in the message, false for 32 bit. This should match the RAD-A2B device setting */
);
msg->txmsg = true;
msg->network = icsneo::Network(icsneo::Network::NetID::A2B2);
elapsedTime += (static_cast<double>(numFramesInWave)) * 1.0/48000.0;
// Load the WAV audio data into the desired channel, break if we fail to load
if(!msg->loadAudioBuffer(wavStream, channelMap)) {
break;
}
decoder.stream->clear();
decoder.stream->seekg(0, std::ios::beg);
// Also outputs entire wave file
while(decoder && elapsedTime < 5.0) {
auto msg = decoder.decode();
rada2b->transmit(msg);
// Transmit the message
if(!rada2b->transmit(msg)) {
std::cout << "Failed to transmit." << std::endl;
break;
}
elapsedTime += (static_cast<double>(msg->getNumFrames()))*1.0/48000.0;
}
decoder.stream->clear();
decoder.stream->seekg(0, std::ios::beg);
// Reset the WAV stream
wavStream.reset();
}
}
// Example 1: RX
void example1(std::shared_ptr<icsneo::Device>& rada2b) {
/**
* Example 1: RX
*/
void example1(const std::shared_ptr<icsneo::Device>& rada2b) {
std::cout << "Receiving 5 seconds of audio data..." << std::endl;
// Add WAV output message callback
// Saves samples to "out.wav"
auto handler = rada2b->addMessageCallback(std::make_shared<icsneo::A2BWAVOutput>("out.wav", 48000));
auto handler = rada2b->addMessageCallback(
std::make_shared<icsneo::A2BWAVOutput>(
"audio16bit.wav", /* output file name */
icsneo::ChannelMap( /** channel mapping which maps our output WAV channels to the message channels from incoming messages */
{ /* See docs for specific A2B channel indexing information */
{static_cast<uint8_t>(3u), static_cast<uint8_t>(0u)}, /* Map output WAV channel 3 to channel 0 downstream of the A2B network/A2BMessage */
{static_cast<uint8_t>(2u), static_cast<uint8_t>(1u)}, /* Map output WAV channel 2 to channel 0 upstream of the A2B network/A2BMessage */
{static_cast<uint8_t>(1u), static_cast<uint8_t>(2u)}, /* Map output WAV channel 1 to channel 1 downstream of the A2B network/A2BMessage */
{static_cast<uint8_t>(0u), static_cast<uint8_t>(3u)} /* Map output WAV channel 0 to channel 1 upstream of the A2B network/A2BMessage */
}
),
icsneo::PCMType::L16, /* store samples with 16 bit resolution*/
2u, /* Number of channels in the output WAV file */
48000 /* Sample rate of WAV file */
)
);
// Sleep this thread for 5 seconds, message callback still runs
std::this_thread::sleep_for(std::chrono::seconds(5));
// Make sure you send 16 bit audio data on the above message channels in the channel map
// to the RAD-A2B main node through a microphone or a different modem.
// You can configure the message channels by changing the stream config in the A2B schematic
// Remove callback
rada2b->removeMessageCallback(handler);
}
// Example 2: RADA2B settings
void example2(std::shared_ptr<icsneo::Device>& rada2b) {
/**
* Example 2: RAD-A2B settings
*/
void example2(const std::shared_ptr<icsneo::Device>& rada2b) {
uint8_t numChannels;
{
// Get device settings
@ -129,82 +162,70 @@ void example2(std::shared_ptr<icsneo::Device>& rada2b) {
rada2bSettings->setNodeType(icsneo::RADA2BSettings::RADA2BDevice::Node, icsneo::RADA2BSettings::NodeType::Master);
// Set TDM mode to TDM4
rada2bSettings->setTDMMode(icsneo::RADA2BSettings::RADA2BDevice::Node, icsneo::RADA2BSettings::TDMMode::TDM8);
rada2bSettings->setTDMMode(icsneo::RADA2BSettings::RADA2BDevice::Node, icsneo::RADA2BSettings::TDMMode::TDM4);
// Apply local settings to device
rada2bSettings->apply();
}
}
// Example 3: A2BMessage API
void example3() {
icsneo::A2BMessage msg = icsneo::A2BMessage(4, false, 2048); // Create new A2BMessage
msg[0][0] = 60; // Set sample using operator[][]
msg[0][3] = 60; // Frame 0, channel 2 upstream
msg[6][2] = 32; // Frame 6, channel 1 downstream
// Equivalent to last line
msg.setSample(icsneo::A2BMessage::A2BDirection::Downstream, 1, 6, 32);
// Get sample
std::cout << "Channel 1 downstream sample for frame 6: " << msg.getSample(icsneo::A2BMessage::A2BDirection::Downstream, 1, 6).value() << std::endl;
// Get number of frames
auto numFrames = msg.getNumFrames();
std::cout << "Num frames: " << numFrames << std::endl;
icsneo::A2BPCMSample sample1 = 40;
icsneo::A2BPCMSample sample2 = 60;
msg.fill(sample1); // Fill whole message with sample 40
msg.fillFrame(sample2, numFrames/2); // Fill frame numFrames/2 with sample2
// Print msg sample contents
std::cout << "A2B message contents:" << std::endl;
for(size_t y = 0; y < numFrames; y++) {
for(size_t x = 0; x < ((size_t)(msg.getNumChannels())*2); x++) { // Num channels including upstream and downstream
std::cout << msg[y][x] << " ";
}
std::cout << std::endl;
}
// Set and get bits
msg.setSyncFrameBit(true);
std::cout << "Was received from monitor: " << msg.isMonitorMsg() << std::endl;
}
// Example 4: Packaging and transmitting sine wave using A2BMessage API
void example4(std::shared_ptr<icsneo::Device>& rada2b) {
std::cout << "Transmitting a 1000 hz sine wave." << std::endl;
/**
* Example 3: Packaging and transmitting sine tone using A2BMessage API
*/
void example3(const std::shared_ptr<icsneo::Device>& rada2b) {
std::cout << "Transmitting a 1000 hz sine tone." << std::endl;
float deltaTime = static_cast<float>(1.0/48000.0);
float elapsedTime = 0.0;
float twoPI = static_cast<float>(2.0*atan(1.0)*4.0);
float frequency = 1000;
float amplitude = static_cast<float>((1 << 23) - 1);
float amplitude = static_cast<float>((1 << 15) - 1);
size_t tdm = 4;
size_t bytesPerSample = 2;
uint8_t icsChannel = 0; // Play audio on channel 2, upstream, see docs for details
size_t numFrames = 2048 / (2 * tdm * bytesPerSample);
// Play for roughly 5 seconds
while(elapsedTime < 5.0) {
// Allocate message
std::shared_ptr<icsneo::A2BMessage> a2bmsgPtr = std::make_shared<icsneo::A2BMessage>(static_cast<uint8_t>(4), false, static_cast<size_t>(2048));
std::shared_ptr<icsneo::A2BMessage> a2bmsgPtr = std::make_shared<icsneo::A2BMessage>(numFrames, icsneo::A2BMessage::TDMMode::TDM4, true);
icsneo::A2BMessage& a2bmsg = *a2bmsgPtr.get();
a2bmsg.network = icsneo::Network(icsneo::Network::NetID::A2B2);
a2bmsg.txmsg = true;
for(size_t frame = 0; frame < a2bmsg.getNumFrames(); frame++) {
// Sine wave sample, amplitude 1000, frequency 1000 hz
// Sine tone sample at 1000 Hz, scaled to 16-bit full scale
float contSample = amplitude*sin(twoPI*frequency*elapsedTime);
icsneo::A2BPCMSample sample = static_cast<icsneo::A2BPCMSample>(contSample);
icsneo::PCMSample sample = static_cast<icsneo::PCMSample>(contSample);
// Set sample for each frame in message
a2bmsg[frame][icsChannel] = sample;
// Send this sine wave sample downstream on channels 0, 1, and 2
a2bmsg.setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
0,
frame,
sample,
icsneo::PCMType::L16
);
a2bmsg.setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
1,
frame,
sample,
icsneo::PCMType::L16
);
a2bmsg.setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
2,
frame,
sample,
icsneo::PCMType::L16
);
elapsedTime+=deltaTime;
}
@ -219,26 +240,10 @@ void example4(std::shared_ptr<icsneo::Device>& rada2b) {
}
// Example 5: Wave loop back
void example5(std::shared_ptr<icsneo::Device>& rada2b) {
auto listener = [&rada2b]() {
auto handler = rada2b->addMessageCallback(std::make_shared<icsneo::A2BWAVOutput>("looped.wav", 48000));
std::this_thread::sleep_for(std::chrono::seconds(5));
rada2b->removeMessageCallback(handler);
};
// Listen on another thread
std::thread listenerThread{listener};
// Transmit wave file using example0
example0(rada2b);
listenerThread.join();
}
// Example 6: Retrieving A2B bus status using I2C messaages.
void example6(std::shared_ptr<icsneo::Device>& rada2b) {
/**
* Example 4: Retrieving A2B bus status using I2C messages.
*/
void example4(const std::shared_ptr<icsneo::Device>& rada2b) {
std::shared_ptr<icsneo::I2CMessage> msg = std::make_shared<icsneo::I2CMessage>();
std::shared_ptr<icsneo::MessageFilter> msgFilter = std::make_shared<icsneo::MessageFilter>(icsneo::Network::NetID::I2C2);
@ -320,6 +325,21 @@ void example6(std::shared_ptr<icsneo::Device>& rada2b) {
rada2b->removeMessageCallback(handler);
}
/**
* Example 5: Reading A2B sequence chart .puml file
*/
void example5(const std::shared_ptr<icsneo::Device>& rada2b) {
// The A2B sequence chart is located at binary index 0
constexpr uint16_t a2bSequenceChartIndex = 0;
// Create an output file stream to capture the sequence chart data
std::ofstream a2bSequenceChart("a2b_sequence_chart.puml", std::ios::out | std::ios::binary);
if(!rada2b->readBinaryFile(a2bSequenceChart, a2bSequenceChartIndex)) {
std::cout << "Failed to read A2B sequence chart" << std::endl;
}
}
void displayUsage() {
std::cout << "libicsneo A2B example" << std::endl;
std::cout << "Example must be ran with rada2b as slave on TDM4 32 bit channel size and one ADI master node" << std::endl;
@ -332,11 +352,10 @@ void displayUsage() {
std::cout << "Example options:" << std::endl;
std::cout << "0\ttx" << std::endl;
std::cout << "1\trx" << std::endl;
std::cout << "2\tSet RADA2B settings" << std::endl;
std::cout << "3\tA2BMessage API" << std::endl;
std::cout << "4\tPackaging and transmitting sine wave using A2BMessage API" << std::endl;
std::cout << "5\tWave loopback" << std::endl;
std::cout << "6\tRead/write I2C registers on A2B board" << std::endl;
std::cout << "2\tSet RAD-A2B settings" << std::endl;
std::cout << "3\tPackaging and transmitting sine wav using A2BMessage API" << std::endl;
std::cout << "4\tRead/write I2C registers on A2B board" << std::endl;
std::cout << "5\tReading out A2B sequence chart .puml file" << std::endl;
}
int main(int argc, char** argv) {
@ -360,7 +379,7 @@ int main(int argc, char** argv) {
int option = atoi(arguments[2].c_str());
if(option < 0 || option > 6) {
if(option < 0 || option > 5) {
std::cerr << "Invalid usage." << std::endl;
displayUsage();
return EXIT_FAILURE;
@ -386,27 +405,27 @@ int main(int argc, char** argv) {
);
if(it == devices.end()) {
std::cerr << "Could not find RADA2B." << std::endl;
std::cerr << "Could not find RAD-A2B." << std::endl;
return EXIT_FAILURE;
}
std::shared_ptr<icsneo::Device> rada2b = *it;
if(!rada2b->open()) {
std::cout << "Failed to open RADA2B." << std::endl;
std::cout << "Failed to open RAD-A2B." << std::endl;
std::cout << icsneo::GetLastError() << std::endl;
return EXIT_FAILURE;
}
else {
std::cout << "Opened RADA2B." << std::endl;
std::cout << "Opened RAD-A2B." << std::endl;
}
if(!rada2b->goOnline()) {
std::cout << "Failed to go online with RADA2B." << std::endl;
std::cout << "Failed to go online with RAD-A2B." << std::endl;
std::cout << icsneo::GetLastError() << std::endl;
return EXIT_FAILURE;
}
else {
std::cout << "RADA2B online." << std::endl;
std::cout << "RAD-A2B online." << std::endl;
}
switch(option) {
@ -420,7 +439,7 @@ int main(int argc, char** argv) {
example2(rada2b);
break;
case 3:
example3();
example3(rada2b);
break;
case 4:
example4(rada2b);
@ -428,9 +447,6 @@ int main(int argc, char** argv) {
case 5:
example5(rada2b);
break;
case 6:
example6(rada2b);
break;
default:
break;
}


@ -63,7 +63,9 @@ int main(int argc, char** argv) {
return EXIT_FAILURE;
}
if (!device->uploadCoremini(std::make_unique<std::ifstream>(arguments[2], std::ios::binary), type)) {
std::ifstream coreminiFile(arguments[2], std::ios::binary);
if (!device->uploadCoremini(coreminiFile, type)) {
std::cout << "Failed to upload coremini" << std::endl;
std::cout << icsneo::GetLastError() << std::endl;
}


@ -5,435 +5,104 @@
#include "icsneo/communication/message/message.h"
#include "icsneo/api/eventmanager.h"
#include <algorithm>
#include <cstring>
#include <iostream>
#include <unordered_map>
#include "icsneo/communication/message/callback/streamoutput/streamoutput.h"
namespace icsneo {
typedef uint32_t A2BPCMSample;
using PCMSample = int32_t;
enum class PCMType : uint8_t {
L16,
L24,
L32
};
using ChannelMap = std::unordered_map<uint8_t, uint8_t>;
class A2BMessage : public Frame {
private:
class FrameView {
private:
class SampleView {
public:
SampleView(uint8_t* vPtr, uint8_t bps, size_t ind) :
index(ind), viewPtr(vPtr), bytesPerSample(bps) {}
operator A2BPCMSample() const {
if(!viewPtr) {
return 0;
}
A2BPCMSample sample = 0;
std::copy(viewPtr+index*bytesPerSample, viewPtr+(index+1)*bytesPerSample, (uint8_t*)&sample);
if(bytesPerSample == 4) {
sample = sample >> 8;
}
return sample;
}
SampleView& operator=(A2BPCMSample sample) {
if(!viewPtr) {
return *this;
}
if(bytesPerSample == 4) {
sample = sample << 8;
}
std::copy((uint8_t*)&sample, (uint8_t*)&sample + bytesPerSample, viewPtr + index*bytesPerSample);
return *this;
}
SampleView(const SampleView&) = delete;
SampleView& operator=(const SampleView&) = delete;
private:
size_t index;
uint8_t* viewPtr;
uint8_t bytesPerSample;
};
public:
FrameView(uint8_t* vPtr, uint8_t nChannels, uint8_t bps) : viewPtr(vPtr), tdm(nChannels), bytesPerSample(bps) {}
SampleView operator[](size_t index) {
if(index >= ((size_t)tdm) * 2) {
EventManager::GetInstance().add(APIEvent(APIEvent::Type::ParameterOutOfRange, APIEvent::Severity::Error));
return SampleView(nullptr, 0, 0);
}
return SampleView(viewPtr, bytesPerSample, index);
}
FrameView& operator=(const std::vector<A2BPCMSample>& samples) {
if(!viewPtr) {
return *this;
}
if(samples.size() != (size_t)(tdm)*2) {
EventManager::GetInstance().add(APIEvent(APIEvent::Type::BufferInsufficient, APIEvent::Severity::Error));
return *this;
}
for(size_t icsChannel = 0; icsChannel < ((size_t)(tdm) * 2); icsChannel++) {
operator[](icsChannel) = samples[icsChannel];
}
return *this;
}
FrameView(const FrameView&) = delete;
FrameView& operator=(const FrameView&) = delete;
private:
uint8_t* viewPtr;
uint8_t tdm;
uint8_t bytesPerSample;
};
public:
enum class A2BDirection : uint8_t {
static constexpr size_t maxAudioBufferSize = 2048;
enum class TDMMode : uint8_t {
TDM2 = 0,
TDM4 = 1,
TDM8 = 2,
TDM12 = 3,
TDM16 = 4,
TDM20 = 5,
TDM24 = 6,
TDM32 = 7,
};
static uint8_t tdmToChannelNum(TDMMode tdm);
enum class Direction : uint8_t {
Downstream = 0,
Upstream = 1
};
A2BMessage(uint8_t nChannels, bool chSize16, size_t size) :
numChannels(nChannels),
channelSize16(chSize16)
{
data.resize(std::min(roundNextMultiple(size, getFrameSize()),(size_t)maxSize), 0);
}
bool allocateSpace(size_t numSpaceToAdd) {
size_t spaceToAdd = roundNextMultiple(numSpaceToAdd, getFrameSize());
if(spaceToAdd + data.size() > maxSize) {
return false;
}
data.resize(data.size() + numSpaceToAdd, 0);
return true;
}
bool addFrame(const std::vector<A2BPCMSample>& frame) {
if(frame.size() != ((size_t)numChannels)*2) {
return false;
}
size_t oldSize = data.size();
if(!allocateSpace(getFrameSize())) {
return false;
}
auto it = data.begin() + oldSize;
size_t offset = 0;
for(A2BPCMSample sample: frame) {
if(!channelSize16) {
sample = sample << 8;
}
std::copy((uint8_t*)&sample, (uint8_t*)&sample + getBytesPerSample(), it + offset);
offset+=getBytesPerSample();
}
return true;
}
bool setFrame(const std::vector<A2BPCMSample>& frame, size_t frameNum) {
if(frame.size() != ((size_t)numChannels)*2 || frameNum >= getNumFrames()) {
return false;
}
auto it = data.begin() + frameNum*getFrameSize();
size_t offset = 0;
for(A2BPCMSample sample: frame) {
if(!channelSize16) {
sample = sample << 8;
}
std::copy((uint8_t*)&sample, (uint8_t*)&sample + getBytesPerSample(), it + offset);
offset+=getBytesPerSample();
}
return true;
}
bool fillChannelAudioBuffer(A2BDirection dir, uint8_t channel, std::vector<uint8_t>& channelBuffer) const {
if(channel >= numChannels) {
return false;
}
size_t offset = getChannelIndex(dir, channel)*getBytesPerSample();
for(size_t frame = 0; frame < getNumFrames(); frame++, offset += getFrameSize()) {
std::copy(data.begin() + offset, data.end() + offset + getBytesPerSample(), std::back_inserter(channelBuffer));
}
return true;
}
bool fillChannelStream(A2BDirection dir, uint8_t channel, std::unique_ptr<std::ostream>& channelStream) const {
if(channel >= numChannels) {
return false;
}
size_t offset = getChannelIndex(dir, channel)*getBytesPerSample();
for(size_t frame = 0; frame < getNumFrames(); frame++, offset += getFrameSize()) {
channelStream->write((const char*)(data.data() + offset), getBytesPerSample());
}
return true;
}
void fill(A2BPCMSample sample) {
uint8_t* buf = data.data();
if(channelSize16) {
uint16_t sample16bit = sample & 0xFF;
uint16_t* samps = (uint16_t*)buf;
std::fill(samps, samps + data.size()/2, sample16bit);
}
else {
A2BPCMSample* samps = (A2BPCMSample*)buf;
sample = sample << 8;
std::fill(samps, samps + data.size()/4, sample);
}
}
bool fillFrame(A2BPCMSample sample, size_t frame) {
if(frame >= getNumFrames()) {
return false;
}
uint8_t* buf = data.data();
size_t start = 2 * numChannels * frame;
size_t end = 2 * numChannels * (frame+1);
if(channelSize16) {
uint16_t sample16bit = sample & 0xFF;
uint16_t* samps = (uint16_t*)buf;
std::fill(samps+start, samps + end, sample16bit);
}
else {
A2BPCMSample* samps = (A2BPCMSample*)buf;
sample = sample << 8;
std::fill(samps+start, samps + end, sample);
}
return true;
}
template<typename Iterator>
bool setAudioBuffer(Iterator begin, Iterator end, A2BDirection dir, uint8_t channel, uint32_t frame) {
size_t offset = getChannelIndex(dir, channel)*getBytesPerSample() + frame * getFrameSize();
size_t dist = (size_t)(std::distance(begin, end));
if(dist > (data.size() - offset)) {
return false;
}
std::copy(begin, end, data.begin() + offset);
return true;
}
template<typename Iterator>
bool setAudioBuffer(Iterator begin, Iterator end) {
return setAudioBuffer(begin, end, A2BMessage::A2BDirection::Downstream, 0, 0);
}
std::optional<A2BPCMSample> getSample(A2BDirection dir, uint8_t channel, uint32_t frame) const {
if(
channel >= numChannels ||
frame >= getNumFrames()
) {
return std::nullopt;
}
A2BPCMSample sample = 0;
size_t offset = getChannelIndex(dir, channel)*getBytesPerSample() + frame * getFrameSize();
std::copy(data.begin() + offset, data.begin() + offset + getBytesPerSample(), (uint8_t*)&sample);
if(channelSize16) {
sample = sample >> 8;
}
return sample;
}
std::optional<A2BPCMSample> getSample(size_t sampleNum) const {
if(sampleNum >= getNumSamples()) {
return std::nullopt;
}
A2BPCMSample sample = 0;
size_t offset = sampleNum*getBytesPerSample();
std::copy(data.begin() + offset, data.begin() + offset + getBytesPerSample(), (uint8_t*)&sample);
if(channelSize16) {
sample = sample >> 8;
}
return sample;
}
bool setSample(A2BDirection dir, uint8_t channel, uint32_t frame, A2BPCMSample sample) {
if(
channel >= numChannels ||
frame >= getNumFrames()
) {
return false;
}
size_t offset = getChannelIndex(dir, channel)*getBytesPerSample() + frame * getFrameSize();
if(!channelSize16) {
sample = sample << 8;
}
uint8_t* sampToBytes = (uint8_t*)&sample;
std::copy(sampToBytes,sampToBytes+getBytesPerSample(), data.begin() + offset);
return true;
}
bool setSample(uint8_t icsChannel, uint32_t frame, A2BPCMSample sample) {
if(
icsChannel >= (2*numChannels) ||
frame >= getNumFrames()
) {
return false;
}
size_t offset = ((size_t)icsChannel)*getBytesPerSample() + frame * getFrameSize();
if(!channelSize16) {
sample = sample << 8;
}
uint8_t* sampToBytes = (uint8_t*)&sample;
std::copy(sampToBytes,sampToBytes+getBytesPerSample(), data.begin() + offset);
return true;
}
FrameView operator[](size_t index) {
if(index >= getNumFrames()) {
EventManager::GetInstance().add(APIEvent(APIEvent::Type::ParameterOutOfRange, APIEvent::Severity::Error));
return FrameView(nullptr, 0, 0);
}
return FrameView(data.data() + index*getFrameSize(), numChannels, getBytesPerSample());
}
size_t getNumSamples() const {
return data.size()/((size_t)getBytesPerSample());
}
uint8_t getNumChannels() const {
return numChannels;
}
uint8_t getBitDepth() const {
return channelSize16 ? 16 : 24;
}
uint8_t getBytesPerSample() const {
return channelSize16 ? 2 : 4;
}
bool isTxMsg() const {
return txmsg;
}
void setTxMsgBit(bool bit) {
txmsg = bit;
}
bool isMonitorMsg() const {
return monitor;
}
void setMonitorBit(bool bit) {
monitor = bit;
}
bool isErrIndicator() const {
return errIndicator;
}
void setErrIndicatorBit(bool bit) {
errIndicator = bit;
}
bool isSyncFrame() const {
return syncFrame;
}
void setSyncFrameBit(bool bit) {
syncFrame = bit;
}
uint16_t getRFU2() const {
return rfu2;
}
void setRFU2(uint16_t newRfu2) {
rfu2 = newRfu2;
}
size_t getFrameSize() const {
return 2*((size_t)numChannels) * ((size_t)getBytesPerSample());
}
size_t getNumFrames() const {
return data.size() / getFrameSize();
}
size_t getAudioBufferSize() const {
return data.size();
}
const uint8_t* getAudioBuffer() const {
return data.data();
}
static constexpr uint32_t maxSize = 2048;
private:
uint16_t rfu2 = 0;
uint8_t numChannels = 0;
bool channelSize16 = false;
bool monitor = false;
bool txmsg = false;
bool errIndicator = false;
bool syncFrame = false;
uint16_t rfu2 = 0;
size_t roundNextMultiple(size_t x, size_t y) const {
if(y==0) {
return 0;
}
else if(x%y == 0) {
return x;
}
A2BMessage() = default;
/**
* Creates a new A2BMessage
*
* @param numFrames The number of audio frames to hold in the message audio buffer
* @param tdm The TDM mode this message will be transmitted with; note this determines the number of channels
* @param chSize16 True if the message channel sizes are 16 bit, false for 32 bit.
*/
A2BMessage(size_t numFrames, TDMMode tdm, bool chSize16);
return x + y - (x%y);
}
/**
* Creates a new A2BMessage with the maximum number of possible frames
*
* @param tdm The TDM mode this message will be transmitted with; note this determines the number of channels
* @param chSize16 True if the message channel sizes are 16 bit, false for 32 bit.
*/
A2BMessage(TDMMode tdm, bool chSize16);
size_t getChannelIndex(A2BDirection dir, uint8_t channel) const {
size_t channelIndex = 2 * ((size_t)channel);
/**
* Loads the A2BMessage audio buffer from an IWAVStream object representing a WAV data-stream
*
* @param wavStream The WAV data-stream to load the audio buffer from
* @param channelMap A map which maps a message channel to a WAV channel. See docs for the A2B message channel format
* @returns true on successful load, false otherwise
*/
bool loadAudioBuffer(IWAVStream& wavStream, const ChannelMap& channelMap);
if(dir == A2BDirection::Upstream) {
channelIndex++;
}
/**
* Get a PCM sample from the audio buffer. If the desired pcmType is larger than the channel size,
* the output will be a PCM sample which is scaled up.
*
* @param dir The direction of the A2B stream
* @param channel The desired channel to read a PCM sample from
* @param frame The desired frame to read a PCM sample from
* @param pcmType The interpreted bit depth of the audio buffer sample
*/
PCMSample getChannelSample(Direction dir, uint8_t channel, size_t frame, PCMType pcmType) const;
return channelIndex;
}
/**
* Write a PCM sample to the audio buffer
*
* @param dir The direction of the A2B stream
* @param channel The desired channel to write a PCM sample to
* @param frame The desired frame to write a PCM sample to
* @param sampleToSet The PCM sample which will be written to the buffer
* @param pcmType The interpreted bit depth of the sample to write
*/
void setChannelSample(Direction dir, uint8_t channel, size_t frame, PCMSample sampleToSet, PCMType pcmType);
size_t getFrameSize() const;
size_t getSampleOffset(Direction dir, uint8_t channel, size_t frame) const;
uint8_t getBytesPerChannel() const;
size_t getNumFrames() const;
};
}
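// For reference: with maxAudioBufferSize = 2048 bytes, a TDM4 message with 16-bit
// channels (16-byte frames) holds at most 2048 / 16 = 128 frames, and with 32-bit
// channels (32-byte frames) at most 64 frames; the frame-count constructor clamps to
// these limits.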


@ -1,85 +0,0 @@
#ifndef __A2BDECODER_H_
#define __A2BDECODER_H_
#ifdef __cplusplus
#include "icsneo/communication/message/callback/streamoutput/streamoutput.h"
#include "icsneo/communication/message/a2bmessage.h"
#include "icsneo/device/device.h"
namespace icsneo {
typedef uint8_t Channel;
class A2BAudioChannelMap {
public:
A2BAudioChannelMap(uint8_t tdm);
void set(Channel outChannel, A2BMessage::A2BDirection dir, Channel inChannel);
void setAll(Channel inChannel);
Channel get(Channel outChannel, A2BMessage::A2BDirection dir) const;
size_t size() const;
uint8_t getTDM() const;
Channel& operator[](size_t idx);
operator const std::vector<Channel>&() const;
private:
size_t getChannelIndex(Channel channel, A2BMessage::A2BDirection dir) const;
std::vector<Channel> rawMap;
};
class A2BDecoder {
public:
A2BDecoder(
std::unique_ptr<std::istream>&& streamOut,
bool chSize16,
const A2BAudioChannelMap& chMap
);
A2BDecoder(
const char* filename,
bool chSize16,
const A2BAudioChannelMap& chMap
);
operator bool() const;
std::shared_ptr<A2BMessage> decode();
bool outputAll(std::shared_ptr<Device> &device);
std::unique_ptr<std::istream> stream;
private:
void initializeFromHeader();
uint8_t tdm;
uint8_t audioBytesPerSample;
uint8_t channelsInWave;
bool channelSize16;
A2BAudioChannelMap channelMap;
std::vector<uint8_t> frame;
std::vector<uint8_t> frameWave;
bool initialized = false;
};
}
#endif // __cplusplus
#endif

View File

@ -9,33 +9,73 @@
namespace icsneo {
/**
* A message callback which ingests A2BMessage PCM data and formats it into a WAV file
*/
class A2BWAVOutput : public StreamOutput {
public:
A2BWAVOutput(const char* filename, uint32_t sampleRate = 44100)
: StreamOutput(filename), wavSampleRate(sampleRate) {}
static constexpr size_t wavBufferSize = 1024 * 32;
A2BWAVOutput(std::unique_ptr<std::ostream>&& os, uint32_t sampleRate = 44100)
: StreamOutput(std::move(os)), wavSampleRate(sampleRate) {}
/**
* Creates a new A2BWAVOutput object
*
* @param filename Name of desired output WAV file
* @param channelMap A map which maps a channel in the output WAV file to a channel in received messages. See docs for specific channel format in messages
* @param bitDepth The bit depth of the samples in the output WAV file
* @param numWAVChannels The number of channels in the output WAV file
* @param sampleRate The output WAV file sample rate
*/
A2BWAVOutput(
const char* filename,
const ChannelMap& channelMap,
PCMType bitDepth,
size_t numWAVChannels,
uint32_t sampleRate = 48000
);
void writeHeader(const std::shared_ptr<A2BMessage>& firstMsg) const;
/**
* Creates a new A2BWAVOutput object
*
* @param os A std::ostream object which represents this WAV file
* @param channelMap A map which maps a channel in the output WAV file to a channel in received messages. See docs for specific channel format in messages
* @param bitDepth The bit depth of the samples in the output WAV file
* @param numWAVChannels The number of channels in the output WAV file
* @param sampleRate The output WAV file sample rate
*/
A2BWAVOutput(
std::ostream& os,
const ChannelMap& channelMap,
PCMType bitDepth,
size_t numWAVChannels,
uint32_t sampleRate = 48000
);
bool callIfMatch(const std::shared_ptr<Message>& message) const override;
void close() const;
~A2BWAVOutput() override {
if(!closed) {
close();
}
}
~A2BWAVOutput() override;
protected:
void close() const;
bool initialize();
uint32_t wavSampleRate;
/**
* Write and clear the current stored audio buffer
*/
bool writeCurrentBuffer() const;
mutable std::vector<uint8_t> wavBuffer; // A buffer which is used to cache PCM data to write to disk later
mutable size_t wavBufferOffset = 0; // Current offset in the above buffer, gets incremented as data is read into buffer
uint32_t wavSampleRate; // The output WAV sample rate
size_t bytesPerSampleWAV; // The number of bytes per sample in the output WAV file
size_t numChannelsWAV; // The number of channels in the output WAV file
ChannelMap chMap; // A map which maps a WAV channel to a A2BMessage channel
size_t maxMessageChannel; // The highest message channel in the above channel map, this variable is used for error checking
bool initialized = false;
mutable uint32_t streamStartPos;
mutable bool firstMessageFlag = true;
mutable bool closed = false;
};
}
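A rough usage sketch for the new A2BWAVOutput constructor signature above. The ChannelMap population is illustrative only (its exact interface is not shown in this diff), `device` is assumed to be an already-open icsneo::Device, the file name is made up, and PCMType::L16 follows the 16-bit case used elsewhere in this change.

#include <memory>
#include "icsneo/communication/message/callback/streamoutput/a2bwavoutput.h"

// Map WAV channel -> A2B message channel (index-style assignment assumed)
icsneo::ChannelMap wavToMessage;
wavToMessage[0] = 0; // WAV left  <- message channel 0
wavToMessage[1] = 2; // WAV right <- message channel 2

auto wavOut = std::make_shared<icsneo::A2BWAVOutput>(
	"a2b_capture.wav",    // output WAV file (hypothetical name)
	wavToMessage,
	icsneo::PCMType::L16, // 16-bit samples in the output file
	2u,                   // two WAV channels
	48000u);              // sample rate in Hz

device->addMessageCallback(wavOut); // assumes `device` was found and opened beforehand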

View File

@ -14,33 +14,34 @@
namespace icsneo {
struct WaveFileHeader {
#pragma pack(push, 1)
struct WAVHeader {
static constexpr uint32_t WAVE_CHUNK_ID = 0x46464952; // "RIFF"
static constexpr uint32_t WAVE_FORMAT = 0x45564157; // "WAVE"
static constexpr uint32_t WAVE_SUBCHUNK1_ID = 0x20746d66; // "fmt "
static constexpr uint32_t WAVE_SUBCHUNK2_ID = 0x61746164; // "data"
static constexpr uint16_t WAVE_SUBCHUNK1_SIZE = 16;
static constexpr uint16_t WAVE_AUDIO_FORMAT_PCM = 1;
static constexpr uint32_t WAVE_DEFAULT_SIZE = 0; // Default size for streamed wav
static constexpr uint32_t WAV_CHUNK_ID = 0x46464952; // "RIFF"
static constexpr uint32_t WAV_FORMAT = 0x45564157; // "WAVE"
static constexpr uint32_t WAV_SUBCHUNK1_ID = 0x20746d66; // "fmt "
static constexpr uint32_t WAV_SUBCHUNK2_ID = 0x61746164; // "data"
static constexpr uint16_t WAV_SUBCHUNK1_SIZE = 16;
static constexpr uint16_t WAV_AUDIO_FORMAT_PCM = 1;
static constexpr uint32_t WAV_DEFAULT_SIZE = 0; // Default size for streamed wav
uint32_t chunkId = WAVE_CHUNK_ID; // "RIFF"
uint32_t chunkSize = WAVE_DEFAULT_SIZE; // number of bytes to follow
uint32_t format = WAVE_FORMAT; // "WAVE"
uint32_t subchunk1Id = WAVE_SUBCHUNK1_ID; // "fmt "
uint32_t subchunk1Size = WAVE_SUBCHUNK1_SIZE; // number of bytes in *this* subchunk (always 16)
uint16_t audioFormat = WAVE_AUDIO_FORMAT_PCM; // 1 for PCM
uint32_t chunkId = WAV_CHUNK_ID; // "RIFF"
uint32_t chunkSize = WAV_DEFAULT_SIZE; // number of bytes to follow
uint32_t format = WAV_FORMAT; // "WAVE"
uint32_t subchunk1Id = WAV_SUBCHUNK1_ID; // "fmt "
uint32_t subchunk1Size = WAV_SUBCHUNK1_SIZE; // number of bytes in *this* subchunk (always 16)
uint16_t audioFormat = WAV_AUDIO_FORMAT_PCM; // 1 for PCM
uint16_t numChannels; // number of channels
uint32_t sampleRate; // sample rate in Hz
uint32_t byteRate; // bytes per second of audio: sampleRate * numChannels * (bitsPerSample / 8)
uint16_t blockAlign; // alignment of each block in bytes: numChannels * (bitsPerSample / 8)
uint16_t bitsPerSample; // number of bits in each sample
uint32_t subchunk2Id = WAVE_SUBCHUNK2_ID; // "data"
uint32_t subchunk2Size = WAVE_DEFAULT_SIZE; // number of bytes to follow
uint32_t subchunk2Id = WAV_SUBCHUNK2_ID; // "data"
uint32_t subchunk2Size = WAV_DEFAULT_SIZE; // number of bytes to follow
WaveFileHeader() = default;
WAVHeader() = default;
WaveFileHeader(uint16_t nChannels, uint32_t sRate, uint16_t bps, uint32_t nSamples = 0) {
WAVHeader(uint16_t nChannels, uint32_t sRate, uint16_t bps, uint32_t nSamples = 0) {
setHeader(nChannels, sRate, bps, nSamples);
}
@ -59,48 +60,82 @@ struct WaveFileHeader {
subchunk2Size = numSamples * numChannels * (bitsPerSample / 8);
chunkSize = subchunk2Size + 36;
}
void write(const std::unique_ptr<std::ostream>& stream) {
stream->write(reinterpret_cast<const char*>(&chunkId), 4);
stream->write(reinterpret_cast<const char*>(&chunkSize), 4);
stream->write(reinterpret_cast<const char*>(&format), 4);
stream->write(reinterpret_cast<const char*>(&subchunk1Id), 4);
stream->write(reinterpret_cast<const char*>(&subchunk1Size), 4);
stream->write(reinterpret_cast<const char*>(&audioFormat), 2);
stream->write(reinterpret_cast<const char*>(&numChannels), 2);
stream->write(reinterpret_cast<const char*>(&sampleRate), 4);
stream->write(reinterpret_cast<const char*>(&byteRate), 4);
stream->write(reinterpret_cast<const char*>(&blockAlign), 2);
stream->write(reinterpret_cast<const char*>(&bitsPerSample), 2);
stream->write(reinterpret_cast<const char*>(&subchunk2Id), 4);
stream->write(reinterpret_cast<const char*>(&subchunk2Size), 4);
}
};
#pragma pack(pop)
class IWAVStream {
private:
std::unique_ptr<std::istream, std::function<void(std::istream*)>> stream;
bool initialized = false;
public:
WAVHeader header;
IWAVStream(std::istream& WAVInput)
: stream(&WAVInput, [](std::istream*){}) {
if(initialize()) {
initialized = true;
}
}
IWAVStream(const char* filename)
: stream(new std::ifstream(filename, std::ios::in | std::ios::binary), std::default_delete<std::istream>()) {
if(initialize()) {
initialized = true;
}
}
bool initialize() {
return !(!stream->read(reinterpret_cast<char*>(&header), sizeof(WAVHeader)));
}
operator bool() const {
return initialized && stream && stream->good();
}
bool read(char* into, std::streamsize num) {
return !(!stream->read(into, num));
}
/**
* Set stream immediately after WAV header
*/
void reset() {
if(!(*this)) {
return;
}
stream->clear();
stream->seekg(sizeof(icsneo::WAVHeader), std::ios::beg);
}
};
class StreamOutput : public MessageCallback {
public:
StreamOutput(std::unique_ptr<std::ostream>&& os, fn_messageCallback cb, std::shared_ptr<MessageFilter> f)
: MessageCallback(cb, f), stream(std::move(os)) {}
StreamOutput(std::ostream& os, fn_messageCallback cb, std::shared_ptr<MessageFilter> f)
: MessageCallback(cb, f), stream(&os, [](std::ostream*){}) {}
StreamOutput(const char* filename, fn_messageCallback cb, std::shared_ptr<MessageFilter> f)
: MessageCallback(cb, f) {
stream = std::make_unique<std::ofstream>(filename, std::ios::binary);
}
:
MessageCallback(cb, f),
stream(
new std::ofstream(filename, std::ios::binary),
std::default_delete<std::ostream>()
) {}
StreamOutput(const char* filename) : MessageCallback([](std::shared_ptr<Message> msg) {}) {
stream = std::make_unique<std::ofstream>(filename, std::ios::binary);
}
StreamOutput(const char* filename) :
MessageCallback([](std::shared_ptr<Message> msg) {}),
stream(
new std::ofstream(filename, std::ios::binary),
std::default_delete<std::ostream>()
) {}
StreamOutput(std::unique_ptr<std::ostream>&& os) : MessageCallback([](std::shared_ptr<Message> msg) {}), stream(std::move(os)) {}
StreamOutput(std::ostream& os) : MessageCallback([](std::shared_ptr<Message> msg) {}), stream(&os, [](std::ostream*){}) {}
protected:
std::unique_ptr<std::ostream> stream;
void write(void* msg, std::streamsize size) const {
stream->write(reinterpret_cast<const char*>(msg), size);
}
std::unique_ptr<std::ostream, std::function<void(std::ostream*)>> stream;
};
}
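For reference, a small sketch of reading WAV data back through the IWAVStream helper declared above; the file name is illustrative, and reading one interleaved frame per call is just one possible granularity.

#include <vector>
#include "icsneo/communication/message/callback/streamoutput/streamoutput.h"

icsneo::IWAVStream wav("input.wav"); // hypothetical input file
if(wav) {
	const uint16_t channels = wav.header.numChannels;
	const uint16_t bytesPerSample = wav.header.bitsPerSample / 8;

	// Pull one interleaved frame (all channels of one sample instant) at a time
	std::vector<char> frame(static_cast<size_t>(channels) * bytesPerSample);
	while(wav.read(frame.data(), static_cast<std::streamsize>(frame.size()))) {
		// ...hand the PCM frame off for further processing
	}
	wav.reset(); // rewind to just after the WAV header
}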

View File

@ -9,7 +9,7 @@
namespace icsneo {
class ExtendedDataMessage : public RawMessage {
class ExtendedDataMessage : public Frame {
public:
#pragma pack(push, 2)
struct ExtendedDataHeader {
@ -23,7 +23,7 @@ public:
static constexpr size_t MaxExtendedDataBufferSize = 2048;
const ExtendedDataHeader header;
ExtendedDataMessage(ExtendedDataHeader params) : RawMessage(Message::Type::RawMessage, Network::NetID::ExtendedData), header{params} {}
ExtendedDataMessage(ExtendedDataHeader params) : header{params} {}
};

View File

@ -158,7 +158,7 @@ public:
bool startScript(Disk::MemoryType memType = Disk::MemoryType::SD);
bool stopScript();
bool clearScript(Disk::MemoryType memType = Disk::MemoryType::SD);
bool uploadCoremini(std::unique_ptr<std::istream>&& stream, Disk::MemoryType memType = Disk::MemoryType::SD);
bool uploadCoremini(std::istream& stream, Disk::MemoryType memType = Disk::MemoryType::SD);
bool eraseScriptMemory(Disk::MemoryType memType, uint64_t amount);
@ -182,8 +182,14 @@ public:
return std::nullopt;
}
virtual bool supportsEraseMemory() const {
return false;
}
// Message polling related functions
bool enableMessagePolling();
bool disableMessagePolling();
bool isMessagePollingEnabled() { return messagePollingCallbackID != 0; };
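A brief sketch of the updated uploadCoremini signature above, which now takes a std::istream reference rather than an owning unique_ptr; the script path is illustrative, and MemoryType::SD is simply the default shown above.

#include <fstream>

std::ifstream script("coremini.bin", std::ios::binary); // hypothetical compiled script
if(script && device->uploadCoremini(script, icsneo::Disk::MemoryType::SD)) {
	device->startScript(icsneo::Disk::MemoryType::SD); // run it from SD
}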

View File

@ -54,6 +54,10 @@ protected:
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -44,6 +44,10 @@ private:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -137,6 +137,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -84,6 +84,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -86,6 +86,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -69,6 +69,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -156,11 +156,11 @@ public:
return static_cast<ChannelSize>(deviceSettings.flags & a2bSettingsFlag16bit);
}
uint8_t getChannelOffset(RADA2BDevice device, A2BMessage::A2BDirection dir) const {
uint8_t getChannelOffset(RADA2BDevice device, A2BMessage::Direction dir) const {
auto cfg = getStructurePointer<rada2b_settings_t>();
auto &deviceSettings = device == RADA2BDevice::Monitor ? cfg->a2b_monitor : cfg->a2b_node;
if(dir == A2BMessage::A2BDirection::Upstream) {
if(dir == A2BMessage::Direction::Upstream) {
return deviceSettings.upstreamChannelOffset;
}
@ -188,11 +188,11 @@ public:
deviceSettings.tdmMode = static_cast<uint8_t>(newMode);
}
void setChannelOffset(RADA2BDevice device, A2BMessage::A2BDirection dir, uint8_t newOffset) {
void setChannelOffset(RADA2BDevice device, A2BMessage::Direction dir, uint8_t newOffset) {
auto cfg = getMutableStructurePointer<rada2b_settings_t>();
auto &deviceSettings = device == RADA2BDevice::Monitor ? cfg->a2b_monitor : cfg->a2b_node;
if(dir == A2BMessage::A2BDirection::Upstream) {
if(dir == A2BMessage::Direction::Upstream) {
deviceSettings.upstreamChannelOffset = newOffset;
}
else {

View File

@ -49,6 +49,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -57,6 +57,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -52,6 +52,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -62,6 +62,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -58,6 +58,10 @@ protected:
// The supported TX networks are the same as the supported RX networks for this device
void setupSupportedTXNetworks(std::vector<Network>& txNetworks) override { setupSupportedRXNetworks(txNetworks); }
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -89,6 +89,10 @@ protected:
ValueCAN4::setupPacketizer(packetizer);
packetizer.align16bit = !com->driver->isEthernet();
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -69,6 +69,10 @@ protected:
// The supported TX networks are the same as the supported RX networks for this device
void setupSupportedTXNetworks(std::vector<Network>& txNetworks) override { setupSupportedRXNetworks(txNetworks); }
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -46,6 +46,10 @@ protected:
ValueCAN4::setupPacketizer(packetizer);
packetizer.align16bit = !com->driver->isEthernet();
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -43,6 +43,10 @@ protected:
std::optional<MemoryAddress> getCoreminiStartAddressSD() const override {
return 0;
}
bool supportsEraseMemory() const override {
return true;
}
};
}

View File

@ -29,8 +29,8 @@ protected:
std::vector<uint8_t> testBytes =
{0xaa, 0x0c, 0x15, 0x00, 0x0b, 0x02, 0x00, 0x00,
0x08, 0x00, 0x00, 0x00, 0x03, 0x02, 0x00, 0x00,
0x08, 0x04, 0x00, 0x00};
0x08, 0x00, 0x00, 0x00, 0xCC, 0xFF, 0x00, 0x00,
0x9A, 0xFF, 0x00, 0x00};
std::vector<uint8_t> recvBytes =
{0xaa, 0x00, 0x2a, 0x00, 0x0a, 0x02, 0x02, 0x01,
@ -44,12 +44,30 @@ protected:
TEST_F(A2BEncoderDecoderTest, PacketEncoderTest)
{
std::vector<uint8_t> bytestream;
auto messagePtr = std::make_shared<icsneo::A2BMessage>((uint8_t)2, true, 8);
auto messagePtr = std::make_shared<icsneo::A2BMessage>(
static_cast<size_t>(1u),
icsneo::A2BMessage::TDMMode::TDM2,
true
);
messagePtr->network = icsneo::Network::NetID::A2B2;
A2BMessage& message = *messagePtr.get();
message[0][0] = (0x02 << 8) | (0x03);
message[0][2] = (0x04 << 8) | (0x08);
message.setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
static_cast<uint8_t>(0u),
0u,
-52,
icsneo::PCMType::L16
);
message.setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
static_cast<uint8_t>(1u),
0u,
-102,
icsneo::PCMType::L16
);
packetEncoder->encode(*packetizer, bytestream, messagePtr);
EXPECT_EQ(bytestream, testBytes);
@ -58,14 +76,45 @@ TEST_F(A2BEncoderDecoderTest, PacketEncoderTest)
TEST_F(A2BEncoderDecoderTest, PacketDecoderTest)
{
std::shared_ptr<icsneo::Message> decodeMsg;
std::shared_ptr<icsneo::A2BMessage> message = std::make_shared<icsneo::A2BMessage>((uint8_t)2, true, 8);
auto message = std::make_shared<icsneo::A2BMessage>(
static_cast<size_t>(1u),
icsneo::A2BMessage::TDMMode::TDM2,
true
);
message->network = icsneo::Network::NetID::A2B1;
message->setTxMsgBit(false);
message->setMonitorBit(true);
message->txmsg = false;
message->monitor = true;
EXPECT_TRUE(message->setSample(0, 0, (0x02 << 8) | (0x03)));
EXPECT_TRUE(message->setSample(2, 0, (0x04 << 8) | (0x08)));
message->setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
static_cast<uint8_t>(0u),
0u,
(0x02 << 8) | (0x03),
icsneo::PCMType::L16
);
message->setChannelSample(
icsneo::A2BMessage::Direction::Downstream,
static_cast<uint8_t>(1u),
0u,
(0x04 << 8) | (0x08),
icsneo::PCMType::L16
);
EXPECT_TRUE(message->getChannelSample(
icsneo::A2BMessage::Direction::Downstream,
static_cast<uint8_t>(0u),
0u,
icsneo::PCMType::L16
) == static_cast<icsneo::PCMSample>((0x02 << 8) | (0x03)));
EXPECT_TRUE(message->getChannelSample(
icsneo::A2BMessage::Direction::Downstream,
static_cast<uint8_t>(1u),
0u,
icsneo::PCMType::L16
) == static_cast<icsneo::PCMSample>((0x04 << 8) | (0x08)));
EXPECT_TRUE(packetizer->input(recvBytes));
auto packets = packetizer->output();
@ -76,10 +125,10 @@ TEST_F(A2BEncoderDecoderTest, PacketDecoderTest)
auto testMessage = std::dynamic_pointer_cast<icsneo::A2BMessage>(decodeMsg);
EXPECT_EQ(message->network, testMessage->network);
EXPECT_EQ(message->data, testMessage->data);
EXPECT_EQ(message->getNumChannels(), testMessage->getNumChannels());
EXPECT_EQ(message->isMonitorMsg(), testMessage->isMonitorMsg());
EXPECT_EQ(message->isTxMsg(), testMessage->isTxMsg());
EXPECT_EQ(message->isErrIndicator(), testMessage->isErrIndicator());
EXPECT_EQ(message->isSyncFrame(), testMessage->isSyncFrame());
EXPECT_EQ(message->getRFU2(), testMessage->getRFU2());
EXPECT_EQ(message->numChannels, testMessage->numChannels);
EXPECT_EQ(message->monitor, testMessage->monitor);
EXPECT_EQ(message->txmsg, testMessage->txmsg);
EXPECT_EQ(message->errIndicator, testMessage->errIndicator);
EXPECT_EQ(message->syncFrame, testMessage->syncFrame);
EXPECT_EQ(message->rfu2, testMessage->rfu2);
}
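For reference on the updated expected bytes in testBytes above: the two samples written by the encoder test are small negative 16-bit PCM values, and their little-endian two's-complement encodings are presumably why 0xCC 0xFF and 0x9A 0xFF appear in the expected stream.

// -52  -> 0xFFCC as a 16-bit two's-complement value -> bytes CC FF (little endian)
// -102 -> 0xFF9A as a 16-bit two's-complement value -> bytes 9A FF (little endian)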