// libcruft-util/cpp.cpp

/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Copyright 2018 Danny Robson <danny@nerdcruft.net>
 */
///////////////////////////////////////////////////////////////////////////////
#include "cpp.hpp"
#include "io.hpp"
using cruft::cpp::include;
using cruft::cpp::passthrough;
using cruft::cpp::processor;
///////////////////////////////////////////////////////////////////////////////
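// A default-constructed processor knows only the built-in `include`
// directive; further handlers can be registered with add().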
processor::processor ()
{
    m_directives.insert ({
        "include",
        std::make_unique<include> (*this)
    });
}
//-----------------------------------------------------------------------------
void
processor::add (std::string token, std::unique_ptr<directive> handler)
{
    m_directives.emplace (std::pair {std::move (token), std::move (handler)});
}
//-----------------------------------------------------------------------------
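// Expand a single top-level file: slurp its contents, strip one trailing
// newline so the tokeniser does not produce an empty final line, and feed
// the resulting lines to the directive-aware overload below.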
void
processor::process (std::ostream &os, const std::filesystem::path &src) const
{
    auto data = cruft::slurp<char> (src);

    context ctx;
    ctx.source.push (src);

    // Workaround to prevent an empty trailing line after line splitting
    if (!data.empty () and data.back () == '\n')
        data.pop_back ();

    cruft::tokeniser<const char*> tok (data, '\n');
    process (os, ctx, cruft::view (tok));
}
//-----------------------------------------------------------------------------
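// Expand a run of source lines.
//
// Lines that do not begin with '#' are copied to the output verbatim.
// `#define`, `#ifndef`, and `#endif` are handled inline; any other
// directive is looked up in m_directives and dispatched to its handler,
// which reports the last line it consumed.
//
// Returns the line containing a terminating `#endif` (so a recursive
// caller can resume after it), or lines.end() when the input is exhausted.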
cruft::tokeniser<const char*>::iterator
processor::process (
    std::ostream &os,
    context &ctx,
    cruft::view<cruft::tokeniser<const char*>::iterator> lines) const
{
    for (auto l = lines.begin (), last = lines.end (); l != last; ++l) {
        // Non-directive lines are copied straight through.
        if (l->empty () || (*l)[0] != '#') {
            os << *l << '\n';
            continue;
        }

        auto tokens = cruft::tokeniser (*l, ' ');
        auto head = tokens.begin ();
        // The directive name without the leading '#', used for handler lookup.
        auto head_string = std::string { head->begin () + 1, head->size () - 1 };

        if (equal (*head, "#endif"))
            return l;

        if (equal (*head, "#define")) {
            auto key = head;
            ++key;
            if (key == tokens.end ())
                throw std::runtime_error ("expected token for define");
            std::string key_string { key->begin (), key->size () };

            auto val = key;
            ++val;
            ctx.defines[key_string] = val == tokens.end ()
                                      ? ""
                                      : std::string (val->begin (), val->size ());
            continue;
        }

        if (equal (*head, "#ifndef")) {
            auto tail = head + 1;
            if (tail == tokens.end ())
                throw std::runtime_error ("expected token for ifndef");

            // recurse and parse the block...
            std::string name { tail->begin (), tail->end () };
            if (ctx.defines.find (name) == ctx.defines.cend ()) {
                l = process (os, ctx, {++l, lines.end ()});
            // ...or skip until an #endif
            } else {
                for (++l; l != lines.end () && !equal (*l, "#endif"); ++l)
                    ;
            }

            // check we've got the expected #endif
            if (l == lines.cend () || !equal (*l, "#endif"))
                throw std::runtime_error ("expected #endif");
            continue;
        }

        // Everything else is dispatched to a registered directive handler.
        auto handler = m_directives.find (head_string);
        if (handler == m_directives.end ())
            throw unknown_directive (head_string);

        lines = lines.consume (handler->second->process (os, ctx, {l, lines.end ()}));
    }

    return lines.end ();
}
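
// A minimal usage sketch (assuming the declarations in cpp.hpp match the
// definitions above; the registered directive name and input path are
// hypothetical):
//
//     cruft::cpp::processor p;
//     p.add ("pragma", std::make_unique<cruft::cpp::passthrough> ("pragma"));
//     p.process (std::cout, "data/example.conf");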
///////////////////////////////////////////////////////////////////////////////
passthrough::passthrough (const std::string &name):
    m_name (name)
{ ; }
//-----------------------------------------------------------------------------
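// Echo the directive line to the output untouched and report the single
// line as consumed.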
cruft::tokeniser<const char*>::iterator
passthrough::process (std::ostream &os,
                      context&,
                      cruft::view<cruft::tokeniser<const char*>::iterator> lines) const
{
    os << *lines.begin () << '\n';
    return lines.begin ();
}
///////////////////////////////////////////////////////////////////////////////
include::include (processor &_parent):
    m_parent (_parent)
{ ; }
//-----------------------------------------------------------------------------
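// Handle an `#include '...'` directive: recover the quoted path, resolve it
// against the directory of the file currently being processed, and run the
// included file through the parent processor before resuming.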
cruft::tokeniser<const char*>::iterator
include::process (std::ostream &os,
                  context &ctx,
                  cruft::view<cruft::tokeniser<const char*>::iterator> lines) const
{
    // Strip the leading `#include '` and the closing quote to recover the path.
    const auto name = lines.begin ()->slice (strlen ("#include '"), -2);
    std::filesystem::path fragment { name.begin (), name.end () };

    // Resolve the path relative to the file currently being processed.
    const auto target = ctx.source.top ().parent_path () / fragment;

    auto data = cruft::slurp<char> (target);
    if (!data.empty () and data.back () == '\n')
        data.pop_back ();

    ctx.source.push (target);
    cruft::tokeniser<const char*> tok (data, '\n');
    m_parent.process (os, ctx, cruft::view (tok.begin (), tok.end ()));
    ctx.source.pop ();

    return lines.begin ();
}