254 changes: 249 additions & 5 deletions NeuralAmpModeler/NeuralAmpModeler.cpp
@@ -1,6 +1,7 @@
#include <algorithm> // std::clamp, std::min
#include <cmath> // pow
#include <filesystem>
#include <fstream> // std::ifstream for file reading
#include <iostream>
#include <utility>

@@ -74,7 +75,6 @@ const bool kDefaultCalibrateInput = false;
const std::string kInputCalibrationLevelParamName = "InputCalibrationLevel";
const double kDefaultInputCalibrationLevel = 12.0;


NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo& info)
: Plugin(info, MakeConfig(kNumParams, kNumPresets))
{
@@ -183,7 +183,6 @@ NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo& info)
{
// Sets mNAMPath and mStagedNAM
const std::string msg = _StageModel(fileName);
// TODO error messages like the IR loader.
if (msg.size())
{
std::stringstream ss;
@@ -407,16 +406,33 @@ void NeuralAmpModeler::OnIdle()

bool NeuralAmpModeler::SerializeState(IByteChunk& chunk) const
{
// If this isn't here when unserializing, then we know we're dealing with something before v0.8.0.
// If this isn't here when unserializing, then we know we're dealing with something before v0.7.13.
WDL_String header("###NeuralAmpModeler###"); // Don't change this!
chunk.PutStr(header.Get());
// Plugin version, so we can load legacy serialized states in the future!
WDL_String version(PLUG_VERSION_STR);
chunk.PutStr(version.Get());
// Model directory (don't serialize the model itself; we'll just load it again
// when we unserialize)

// Serialize file paths for backward compatibility
chunk.PutStr(mNAMPath.Get());
chunk.PutStr(mIRPath.Get());

// Embed the actual file data for portability
// Data was read when model/IR was loaded
int namDataSize = static_cast<int>(mNAMData.size());
chunk.Put(&namDataSize);
if (namDataSize > 0)
{
chunk.PutBytes(mNAMData.data(), namDataSize);
}

int irDataSize = static_cast<int>(mIRData.size());
chunk.Put(&irDataSize);
if (irDataSize > 0)
{
chunk.PutBytes(mIRData.data(), irDataSize);
}
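// Resulting chunk layout, in write order (the matching reader in Unserialization.cpp is
// presumably expected to consume it in the same order): header string, version string,
// NAM path, IR path, NAM data size (int) followed by the raw .nam bytes, IR data size (int)
// followed by the raw .wav bytes, and finally the serialized parameters.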

return SerializeParams(chunk);
}

@@ -697,6 +713,18 @@ std::string NeuralAmpModeler::_StageModel(const WDL_String& modelPath)
temp->Reset(GetSampleRate(), GetBlockSize());
mStagedModel = std::move(temp);
mNAMPath = modelPath;

// Read file data for embedding in session
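// Opening with std::ios::ate places the get pointer at the end of the file, so tellg()
// reports the file size; the stream is then rewound and the entire file read into memory.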
mNAMData.clear();
std::ifstream file(dspPath, std::ios::binary | std::ios::ate);
if (file.is_open())
{
std::streamsize size = file.tellg();
file.seekg(0, std::ios::beg);
mNAMData.resize(static_cast<size_t>(size));
file.read(reinterpret_cast<char*>(mNAMData.data()), size);
}

SendControlMsgFromDelegate(kCtrlTagModelFileBrowser, kMsgTagLoadedModel, mNAMPath.GetLength(), mNAMPath.Get());
}
catch (std::runtime_error& e)
@@ -721,6 +749,7 @@ dsp::wav::LoadReturnCode NeuralAmpModeler::_StageIR(const WDL_String& irPath)
// path and the model got caught on opposite sides of the fence...
WDL_String previousIRPath = mIRPath;
const double sampleRate = GetSampleRate();

dsp::wav::LoadReturnCode wavState = dsp::wav::LoadReturnCode::ERROR_OTHER;
try
{
@@ -738,6 +767,19 @@ dsp::wav::LoadReturnCode NeuralAmpModeler::_StageIR(const WDL_String& irPath)
if (wavState == dsp::wav::LoadReturnCode::SUCCESS)
{
mIRPath = irPath;

// Read file data for embedding in session
mIRData.clear();
auto irPathU8 = std::filesystem::u8path(irPath.Get());
std::ifstream file(irPathU8, std::ios::binary | std::ios::ate);
if (file.is_open())
{
std::streamsize size = file.tellg();
file.seekg(0, std::ios::beg);
mIRData.resize(static_cast<size_t>(size));
file.read(reinterpret_cast<char*>(mIRData.data()), size);
}

SendControlMsgFromDelegate(kCtrlTagIRFileBrowser, kMsgTagLoadedIR, mIRPath.GetLength(), mIRPath.Get());
}
else
@@ -911,5 +953,207 @@ void NeuralAmpModeler::_UpdateMeters(sample** inputPointer, sample** outputPoint
mOutputSender.ProcessBlock(outputPointer, (int)nFrames, kCtrlTagOutputMeter, nChansHack);
}

std::string NeuralAmpModeler::_StageModelFromData(const std::vector<uint8_t>& data, const WDL_String& originalPath)
{
WDL_String previousNAMPath = mNAMPath;
const double sampleRate = GetSampleRate();

try
{
// Parse the JSON from memory
std::string jsonStr(data.begin(), data.end());
nlohmann::json j = nlohmann::json::parse(jsonStr);

// Build dspData structure
nam::dspData dspData;
dspData.version = j["version"];
dspData.architecture = j["architecture"];
dspData.config = j["config"];
dspData.metadata = j["metadata"];

// Extract weights
if (j.find("weights") != j.end())
{
dspData.weights = j["weights"].get<std::vector<float>>();
}

// Extract sample rate
if (j.find("sample_rate") != j.end())
dspData.expected_sample_rate = j["sample_rate"];
else
dspData.expected_sample_rate = -1.0;
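
// For reference, a minimal sketch of the .nam JSON shape implied by the fields read above
// (values are illustrative placeholders, not taken from any real model file):
// {
//   "version": "...",
//   "architecture": "WaveNet",
//   "config": { ... },
//   "metadata": { ... },
//   "weights": [0.013, -0.207, ...],
//   "sample_rate": 48000
// }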

// Create DSP from dspData
std::unique_ptr<nam::DSP> model = nam::get_dsp(dspData);
std::unique_ptr<ResamplingNAM> temp = std::make_unique<ResamplingNAM>(std::move(model), sampleRate);
temp->Reset(sampleRate, GetBlockSize());
mStagedModel = std::move(temp);
mNAMPath = originalPath;
mNAMData = data; // Store the embedded data
SendControlMsgFromDelegate(kCtrlTagModelFileBrowser, kMsgTagLoadedModel, mNAMPath.GetLength(), mNAMPath.Get());
}
catch (std::exception& e)
{
SendControlMsgFromDelegate(kCtrlTagModelFileBrowser, kMsgTagLoadFailed);

if (mStagedModel != nullptr)
{
mStagedModel = nullptr;
}
mNAMPath = previousNAMPath;
std::cerr << "Failed to read DSP module from embedded data" << std::endl;
std::cerr << e.what() << std::endl;
return e.what();
}
return "";
}

dsp::wav::LoadReturnCode NeuralAmpModeler::_StageIRFromData(const std::vector<uint8_t>& data,
const WDL_String& originalPath)
{
WDL_String previousIRPath = mIRPath;
const double sampleRate = GetSampleRate();

dsp::wav::LoadReturnCode wavState = dsp::wav::LoadReturnCode::ERROR_OTHER;

try
{
// Parse WAV from memory
std::vector<float> audio;
double wavSampleRate = 0.0;

// Basic WAV parser for in-memory data
// WAV format: RIFF header (12 bytes) + fmt chunk + data chunk
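// Byte layout assumed by the fixed offsets below (standard little-endian RIFF/WAVE):
// bytes 0-3 "RIFF", bytes 4-7 overall size, bytes 8-11 "WAVE"; each subsequent chunk is a
// 4-byte ID plus a 4-byte size plus its payload. The fmt payload holds audioFormat (u16),
// numChannels (u16), sampleRate (u32), byteRate (u32), blockAlign (u16), bitsPerSample (u16).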
if (data.size() < 44) // Minimum WAV file size
{
throw std::runtime_error("IR data too small to be valid WAV");
}

// Check RIFF header
if (data[0] != 'R' || data[1] != 'I' || data[2] != 'F' || data[3] != 'F')
{
throw std::runtime_error("Invalid WAV format - missing RIFF header");
}

// Check WAVE format
if (data[8] != 'W' || data[9] != 'A' || data[10] != 'V' || data[11] != 'E')
{
throw std::runtime_error("Invalid WAV format - not a WAVE file");
}

// Find fmt chunk
size_t pos = 12;
uint16_t audioFormat = 0;
uint16_t numChannels = 0;
uint32_t sampleRateInt = 0;
uint16_t bitsPerSample = 0;
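
// Walk the chunk list until the fmt and data chunks are found. RIFF chunks are word-aligned
// (an odd-sized chunk is followed by a pad byte); the simple advance below assumes
// even-sized chunks and does not add that padding.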

while (pos < data.size() - 8)
{
std::string chunkID(data.begin() + pos, data.begin() + pos + 4);
uint32_t chunkSize = *reinterpret_cast<const uint32_t*>(&data[pos + 4]);

if (chunkID == "fmt ")
{
audioFormat = *reinterpret_cast<const uint16_t*>(&data[pos + 8]);
numChannels = *reinterpret_cast<const uint16_t*>(&data[pos + 10]);
sampleRateInt = *reinterpret_cast<const uint32_t*>(&data[pos + 12]);
bitsPerSample = *reinterpret_cast<const uint16_t*>(&data[pos + 22]);
wavSampleRate = static_cast<double>(sampleRateInt);
}
else if (chunkID == "data")
{
// Found data chunk
size_t dataStart = pos + 8;
size_t numSamples = chunkSize / (bitsPerSample / 8); // total samples across all channels (numChannels is not applied here)

audio.resize(numSamples);

// Convert based on bits per sample
if (bitsPerSample == 16 && audioFormat == 1) // PCM 16-bit
{
for (size_t i = 0; i < numSamples; i++)
{
int16_t sample = *reinterpret_cast<const int16_t*>(&data[dataStart + i * 2]);
audio[i] = sample / 32768.0f;
}
}
else if (bitsPerSample == 24 && audioFormat == 1) // PCM 24-bit
{
for (size_t i = 0; i < numSamples; i++)
{
int32_t sample = 0;
sample |= static_cast<int32_t>(data[dataStart + i * 3]);
sample |= static_cast<int32_t>(data[dataStart + i * 3 + 1]) << 8;
sample |= static_cast<int32_t>(data[dataStart + i * 3 + 2]) << 16;
if (sample & 0x800000)
sample |= 0xFF000000; // Sign extend
audio[i] = sample / 8388608.0f;
}
}
else if (bitsPerSample == 32 && audioFormat == 3) // IEEE float 32-bit
{
for (size_t i = 0; i < numSamples; i++)
{
audio[i] = *reinterpret_cast<const float*>(&data[dataStart + i * 4]);
}
}
else
{
throw std::runtime_error("Unsupported WAV format");
}

break;
}

pos += 8 + chunkSize;
}

if (audio.empty())
{
throw std::runtime_error("No audio data found in WAV");
}

// Validate that a fmt chunk was actually found and provided a usable sample rate;
// malformed WAV files can omit the fmt chunk or place chunks out of order.
if (wavSampleRate <= 0.0 || wavSampleRate != wavSampleRate) // self-comparison catches NaN
{
throw std::runtime_error("Invalid or missing sample rate in WAV fmt chunk");
}

// Create IR from the loaded data
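// The ImpulseResponse constructor is given the session sample rate and is expected to
// handle resampling from mRawAudioSampleRate, as in the file-based _StageIR path.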
dsp::ImpulseResponse::IRData irData;
irData.mRawAudio = audio;
irData.mRawAudioSampleRate = wavSampleRate;

mStagedIR = std::make_unique<dsp::ImpulseResponse>(irData, sampleRate);
wavState = dsp::wav::LoadReturnCode::SUCCESS;
}
catch (std::exception& e)
{
wavState = dsp::wav::LoadReturnCode::ERROR_OTHER;
std::cerr << "Failed to load IR from embedded data:" << std::endl;
std::cerr << e.what() << std::endl;
}

if (wavState == dsp::wav::LoadReturnCode::SUCCESS)
{
mIRPath = originalPath;
mIRData = data; // Store the embedded data
SendControlMsgFromDelegate(kCtrlTagIRFileBrowser, kMsgTagLoadedIR, mIRPath.GetLength(), mIRPath.Get());
}
else
{
if (mStagedIR != nullptr)
{
mStagedIR = nullptr;
}
mIRPath = previousIRPath;
SendControlMsgFromDelegate(kCtrlTagIRFileBrowser, kMsgTagLoadFailed);
}

return wavState;
}

// HACK
#include "Unserialization.cpp"
8 changes: 8 additions & 0 deletions NeuralAmpModeler/NeuralAmpModeler.h
@@ -220,10 +220,14 @@ class NeuralAmpModeler final : public iplug::Plugin
// Loads a NAM model and stores it to mStagedNAM
// Returns an empty string on success, or an error message on failure.
std::string _StageModel(const WDL_String& dspFile);
// Loads a NAM model from embedded binary data
std::string _StageModelFromData(const std::vector<uint8_t>& data, const WDL_String& originalPath);
// Loads an IR and stores it to mStagedIR.
// Return status code so that error messages can be relayed if
// it wasn't successful.
dsp::wav::LoadReturnCode _StageIR(const WDL_String& irPath);
// Loads an IR from embedded binary data
dsp::wav::LoadReturnCode _StageIRFromData(const std::vector<uint8_t>& data, const WDL_String& originalPath);

bool _HaveModel() const { return this->mModel != nullptr; };
// Prepare the input & output buffers
@@ -307,6 +311,10 @@ class NeuralAmpModeler final : public iplug::Plugin
// Path to IR (.wav file)
WDL_String mIRPath;

// Embedded file data for portability (stored with DAW session)
std::vector<uint8_t> mNAMData;
std::vector<uint8_t> mIRData;
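// These buffers are populated when a model/IR is staged (from disk or from embedded data)
// and written out by SerializeState(), so a session can be restored even if the original
// files have been moved or deleted.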

WDL_String mHighLightColor{PluginColors::NAM_THEMECOLOR.ToColorCode()};

std::unordered_map<std::string, double> mNAMParams = {{"Input", 0.0}, {"Output", 0.0}};