// file      : xsd/cxx/tree/generator.cxx
// author    : Boris Kolpackov
// copyright : Copyright (c) 2005-2011 Code Synthesis Tools CC
// license   : GNU GPL v2 + exceptions; see accompanying LICENSE file

#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include "../../../libxsd/xsd/cxx/version.hxx"

using namespace std;
using namespace cutl;
using namespace XSDFrontend::SemanticGraph;

//
//
typedef std::wifstream WideInputFileStream;
typedef std::wofstream WideOutputFileStream;

namespace CXX
{
  namespace
  {
    char const copyright_gpl[] =
    "// Copyright (c) 2005-2011 Code Synthesis Tools CC\n"
    "//\n"
    "// This program was generated by CodeSynthesis XSD, an XML Schema to\n"
    "// C++ data binding compiler.\n"
    "//\n"
    "// This program is free software; you can redistribute it and/or modify\n"
    "// it under the terms of the GNU General Public License version 2 as\n"
    "// published by the Free Software Foundation.\n"
    "//\n"
    "// This program is distributed in the hope that it will be useful,\n"
    "// but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
    "// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
    "// GNU General Public License for more details.\n"
    "//\n"
    "// You should have received a copy of the GNU General Public License\n"
    "// along with this program; if not, write to the Free Software\n"
    "// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA\n"
    "//\n"
    "// In addition, as a special exception, Code Synthesis Tools CC gives\n"
    "// permission to link this program with the Xerces-C++ library (or with\n"
    "// modified versions of Xerces-C++ that use the same license as Xerces-C++),\n"
    "// and distribute linked combinations including the two. You must obey\n"
    "// the GNU General Public License version 2 in all respects for all of\n"
    "// the code used other than Xerces-C++. If you modify this copy of the\n"
    "// program, you may extend this exception to your version of the program,\n"
    "// but you are not obligated to do so. If you do not wish to do so, delete\n"
    "// this exception statement from your version.\n"
    "//\n"
    "// Furthermore, Code Synthesis Tools CC makes a special exception for\n"
    "// the Free/Libre and Open Source Software (FLOSS) which is described\n"
    "// in the accompanying FLOSSE file.\n"
    "//\n\n";
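
    // The proprietary banner below is used in place of the GPL banner
    // when ops.proprietary_license () is set; see the copyright selection
    // in generate () further down.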
    char const copyright_proprietary[] =
    "// Copyright (c) 2005-2011 Code Synthesis Tools CC\n"
    "//\n"
    "// This program was generated by CodeSynthesis XSD, an XML Schema\n"
    "// to C++ data binding compiler, in the Proprietary License mode.\n"
    "// You should have received a proprietary license from Code Synthesis\n"
    "// Tools CC prior to generating this code. See the license text for\n"
    "// conditions.\n"
    "//\n\n";
  }

  void Tree::Generator::
  usage ()
  {
    CXX::Tree::options::print_usage (wcout);
    CXX::options::print_usage (wcout);
  }

  namespace
  {
    void
    open (WideInputFileStream& ifs, NarrowString const& path)
    {
      try
      {
        Path fs_path (path);
        ifs.open (fs_path.string ().c_str (),
                  std::ios_base::in | std::ios_base::binary);

        if (!ifs.is_open ())
        {
          wcerr << path.c_str () << ": error: unable to open in read mode"
                << endl;
          throw Tree::Generator::Failed ();
        }
      }
      catch (InvalidPath const&)
      {
        wcerr << "error: '" << path.c_str () << "' is not a valid "
              << "filesystem path" << endl;
        throw Tree::Generator::Failed ();
      }
    }

    void
    append (WideOutputFileStream& os,
            NarrowString const& path,
            WideInputFileStream& default_is)
    {
      using std::ios_base;

      if (path)
      {
        WideInputFileStream is;
        open (is, path);
        os << is.rdbuf ();
      }
      else if (default_is.is_open ())
      {
        os << default_is.rdbuf ();
        default_is.seekg (0, ios_base::beg);
      }
    }

    void
    append (WideOutputFileStream& os,
            NarrowStrings const& primary,
            NarrowStrings const& def)
    {
      NarrowStrings const& v (primary.empty () ? def : primary);

      for (NarrowStrings::const_iterator i (v.begin ()), e (v.end ());
           i != e; ++i)
      {
        os << i->c_str () << endl;
      }
    }
  }

  size_t Tree::Generator::
  generate (Tree::options const& ops,
            Schema& schema,
            Path const& file_path,
            bool fpt,
            StringLiteralMap const& string_literal_map,
            const WarningSet& disabled_warnings,
            FileList& file_list,
            AutoUnlinks& unlinks)
  {
    using cutl::shared_ptr;

    typedef cutl::re::regexsub Regex;

    typedef vector<Path> Paths;
    typedef vector<shared_ptr<WideOutputFileStream> > WideOutputFileStreams;

    try
    {
      // Do option validation.
      //
      if (ops.parts () < 1)
      {
        wcerr << "error: invalid value for option --parts: " << ops.parts ()
              << endl;
        throw Failed ();
      }

      // Get counts.
      //
      Counts counts;
      {
        Counter counter;
        counts = counter.count (ops, schema, file_path);

        /*
        wcerr << "global type count: " << counts.global_types << endl;
        wcerr << "global element count: " << counts.global_elements << endl;
        wcerr << "generated global element count: "
              << counts.generated_global_elements << endl;
        wcerr << "total complexity: " << counts.complexity_total << endl;
        wcerr << "complexity vector size: " << counts.complexity.size ()
              << endl;
        */
      }

      // Evaluate the graph for the possibility of generating something
      // useful.
      //
      {
        Validator validator;
        if (!validator.validate (
              ops, schema, file_path, disabled_warnings, counts))
          throw Failed ();
      }

      bool gen_cxx (!ops.generate_dep_only ());

      // Process names.
      //
      if (gen_cxx)
      {
        NameProcessor proc;
        if (!proc.process (ops, schema, file_path, string_literal_map))
          throw Failed ();
      }

      // Process polymorphic types.
      //
      if (gen_cxx &&
          ops.generate_polymorphic () &&
          !ops.polymorphic_type_all ())
      {
        PolymorphismProcessor proc;
        if (!proc.process (ops, schema, file_path, disabled_warnings))
          throw Failed ();
      }

      // Parts.
      //
      size_t parts (ops.parts ());
      size_t units (counts.global_types + counts.generated_global_elements);
      size_t units_per_part (units / parts);

      if (parts != 1 && units_per_part < 1)
      {
        wcerr << "error: too many parts specified: " << parts << endl;
        throw Failed ();
      }

      size_t complexity_per_part (counts.complexity_total / parts);

      NarrowString parts_suffix (ops.parts_suffix ());

      //
      //
      bool generate_xml_schema (ops.generate_xml_schema ());

      // We could be compiling several schemas at once, in which case
      // handling of the --generate-xml-schema option gets tricky: we
      // will need to rely on the presence of the --extern-xml-schema
      // option to tell us which (fake) schema file corresponds to XML
      // Schema.
      //
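      // For example, in a (hypothetical) invocation that compiles several
      // schemas together with --generate-xml-schema and
      // --extern-xml-schema xml-schema.xsd, only the unit whose file name
      // matches xml-schema.xsd keeps the XML Schema mapping; for all the
      // other schemas the flag is reset here.
      //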
      if (generate_xml_schema)
      {
        if (NarrowString name = ops.extern_xml_schema ())
        {
          if (file_path.string () != name)
            generate_xml_schema = false;
        }
      }

      bool header (true);
      bool inline_ (ops.generate_inline () && !generate_xml_schema);
      bool forward (ops.generate_forward () && !generate_xml_schema);
      bool source (!generate_xml_schema);
      bool gen_dep ((ops.generate_dep () || ops.generate_dep_only ()) &&
                    !generate_xml_schema);

      if (ops.generate_dep_only () && generate_xml_schema)
      {
        wcerr << "error: no dependency information can be generated for "
              << "the XML Schema header" << endl;
        throw Failed ();
      }

      if (gen_dep && fpt)
      {
        wcerr << "error: dependency generation is not supported in the "
              << "file-per-type mode" << endl;
        throw Failed ();
      }

      // Generate code.
      //
      NarrowString name (file_path.leaf ().string ());

      NarrowString hxx_suffix (ops.hxx_suffix ());
      NarrowString ixx_suffix (ops.ixx_suffix ());
      NarrowString cxx_suffix (ops.cxx_suffix ());
      NarrowString fwd_suffix (ops.fwd_suffix ());
      NarrowString dep_suffix (ops.dep_suffix ());

      Regex hxx_expr (ops.hxx_regex ().empty ()
                      ? "#^(.+?)(\\.[^./\\\\]+)?$#$1" + hxx_suffix + "#"
                      : ops.hxx_regex ());

      Regex ixx_expr (ops.ixx_regex ().empty ()
                      ? "#^(.+?)(\\.[^./\\\\]+)?$#$1" + ixx_suffix + "#"
                      : ops.ixx_regex ());

      Regex cxx_expr (ops.cxx_regex ().empty ()
                      ? "#^(.+?)(\\.[^./\\\\]+)?$#$1" + cxx_suffix + "#"
                      : ops.cxx_regex ());

      Regex fwd_expr (ops.fwd_regex ().empty ()
                      ? "#^(.+?)(\\.[^./\\\\]+)?$#$1" + fwd_suffix + "#"
                      : ops.fwd_regex ());

      Regex dep_expr (ops.dep_regex ().empty ()
                      ? "#^(.+?)(\\.[^./\\\\]+)?$#$1" + dep_suffix + "#"
                      : ops.dep_regex ());

      if (header && !hxx_expr.match (name))
      {
        wcerr << "error: header expression '"
              << hxx_expr.regex ().str ().c_str () << "' does not match '"
              << name.c_str () << "'" << endl;
        throw Failed ();
      }

      if (inline_ && !ixx_expr.match (name))
      {
        wcerr << "error: inline expression '"
              << ixx_expr.regex ().str ().c_str () << "' does not match '"
              << name.c_str () << "'" << endl;
        throw Failed ();
      }

      if (source && parts == 1 && !cxx_expr.match (name))
      {
        wcerr << "error: source expression '"
              << cxx_expr.regex ().str ().c_str () << "' does not match '"
              << name.c_str () << "'" << endl;
        throw Failed ();
      }

      if (forward && !fwd_expr.match (name))
      {
        wcerr << "error: forward expression '"
              << fwd_expr.regex ().str ().c_str () << "' does not match '"
              << name.c_str () << "'" << endl;
        throw Failed ();
      }

      if (gen_dep && !dep_expr.match (name))
      {
        wcerr << "error: dependency expression '"
              << dep_expr.regex ().str ().c_str () << "' does not match '"
              << name.c_str () << "'" << endl;
        throw Failed ();
      }
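
      // With the default expressions each output file name is the schema
      // file name with its extension replaced by the corresponding suffix;
      // e.g., "hello.xsd" with an '.hxx' suffix becomes "hello.hxx".
      //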
      NarrowString hxx_name (header ? hxx_expr.replace (name) : NarrowString ());
      NarrowString ixx_name (inline_ ? ixx_expr.replace (name) : NarrowString ());
      NarrowString fwd_name (forward ? fwd_expr.replace (name) : NarrowString ());
      NarrowString dep_name (gen_dep ? dep_expr.replace (name) : NarrowString ());

      Path hxx_path (hxx_name);
      Path ixx_path (ixx_name);
      Path fwd_path (fwd_name);
      Path dep_path (dep_name);
      Paths cxx_paths;

      if (source)
      {
        if (parts > 1)
        {
          for (size_t i (0); i < parts; ++i)
          {
            std::ostringstream os;
            os << i;

            Regex expr (
              "#^(.+?)(\\.[^./\\\\]+)?$#$1" + parts_suffix + os.str () +
              "$2#");

            NarrowString part_name (expr.replace (name));

            if (!cxx_expr.match (part_name))
            {
              wcerr << "error: source expression '"
                    << cxx_expr.regex ().str ().c_str ()
                    << "' does not match '"
                    << part_name.c_str () << "'" << endl;
              throw Failed ();
            }

            cxx_paths.push_back (Path (cxx_expr.replace (part_name)));
          }
        }
        else
          cxx_paths.push_back (Path (cxx_expr.replace (name)));
      }

      Path out_dir;

      if (NarrowString dir = ops.output_dir ())
      {
        try
        {
          out_dir = Path (dir);
        }
        catch (InvalidPath const&)
        {
          wcerr << dir.c_str () << ": error: invalid path" << endl;
          throw Failed ();
        }
      }

      if (fpt && !generate_xml_schema)
      {
        // In the file-per-type mode the schema files are always local
        // unless the user included a directory in the path, in which case
        // we propagate it to the output files.
        //
        Path fpt_dir (file_path.directory ());

        if (!fpt_dir.empty ())
          out_dir /= fpt_dir;
      }

      if (!out_dir.empty ())
      {
        hxx_path = out_dir / hxx_path;
        ixx_path = out_dir / ixx_path;
        fwd_path = out_dir / fwd_path;
        dep_path = out_dir / dep_path;

        for (Paths::iterator i (cxx_paths.begin ());
             i != cxx_paths.end (); ++i)
          *i = out_dir / *i;
      }

      //
      //
      WideOutputFileStream hxx;
      WideOutputFileStream ixx;
      WideOutputFileStream fwd;
      WideOutputFileStream dep;
      WideOutputFileStreams cxx;

      // DEP
      //
      if (gen_dep)
      {
        dep.open (dep_path.string ().c_str (), ios_base::out);

        if (!dep.is_open ())
        {
          wcerr << dep_path << ": error: unable to open in write mode"
                << endl;
          throw Failed ();
        }

        unlinks.add (dep_path);
        file_list.push_back (dep_path.string ());
      }

      // FWD
      //
      if (gen_cxx && forward)
      {
        fwd.open (fwd_path.string ().c_str (), ios_base::out);

        if (!fwd.is_open ())
        {
          wcerr << fwd_path << ": error: unable to open in write mode"
                << endl;
          throw Failed ();
        }

        unlinks.add (fwd_path);
        file_list.push_back (fwd_path.string ());
      }

      // HXX
      //
      if (gen_cxx && header)
      {
        hxx.open (hxx_path.string ().c_str (), ios_base::out);

        if (!hxx.is_open ())
        {
          wcerr << hxx_path << ": error: unable to open in write mode"
                << endl;
          throw Failed ();
        }

        unlinks.add (hxx_path);
        file_list.push_back (hxx_path.string ());
      }

      // IXX
      //
      if (gen_cxx && inline_)
      {
        ixx.open (ixx_path.string ().c_str (), ios_base::out);

        if (!ixx.is_open ())
        {
          wcerr << ixx_path << ": error: unable to open in write mode"
                << endl;
          throw Failed ();
        }

        unlinks.add (ixx_path);
        file_list.push_back (ixx_path.string ());
      }

      // CXX
      //
      if (gen_cxx && source)
      {
        for (Paths::iterator i (cxx_paths.begin ());
             i != cxx_paths.end (); ++i)
        {
          shared_ptr<WideOutputFileStream> s (
            new (shared) WideOutputFileStream (
              i->string ().c_str (), ios_base::out));

          if (!s->is_open ())
          {
            wcerr << *i << ": error: unable to open in write mode" << endl;
            throw Failed ();
          }

          unlinks.add (*i);
          file_list.push_back (i->string ());
          cxx.push_back (s);
        }
      }

      // Print copyright and license.
      //
      char const* copyright (
        ops.proprietary_license () ? copyright_proprietary : copyright_gpl);

      if (gen_cxx && header)
        hxx << copyright;

      if (gen_cxx && forward)
        fwd << copyright;
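
      // When Doxygen documentation is requested, the header additionally
      // gets a file-level comment of roughly this shape (the schema name
      // here is only an example):
      //
      //   /**
      //    * @file
      //    * @brief Generated from hello.xsd.
      //    */
      //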
      if (ops.generate_doxygen ())
      {
        // Use native path format.
        //
        hxx << "/**" << endl
            << " * @file" << endl
            << " * @brief Generated from " << name.c_str () << "." << endl
            << " */" << endl
            << endl;
      }

      if (gen_cxx && inline_)
        ixx << copyright;

      if (gen_cxx && source)
      {
        for (WideOutputFileStreams::iterator i (cxx.begin ());
             i != cxx.end (); ++i)
          **i << copyright;
      }

      // Prologue.
      //
      WideInputFileStream prologue;
      {
        NarrowString name (ops.prologue_file ());

        if (name)
          open (prologue, name);
      }

      // Epilogue.
      //
      WideInputFileStream epilogue;
      {
        NarrowString name (ops.epilogue_file ());

        if (name)
          open (epilogue, name);
      }

      // SLOC counter.
      //
      size_t sloc_total (0);
      bool show_sloc (ops.show_sloc ());

      typedef
      compiler::ostream_filter<compiler::cxx_indenter, wchar_t>
      ind_filter;

      typedef
      compiler::ostream_filter<compiler::sloc_counter, wchar_t>
      sloc_filter;

      //
      //
      Regex guard_expr ("/([a-z])([A-Z])/$1_$2/"); // Split words.

      NarrowString guard_prefix (ops.guard_prefix ());

      if (!guard_prefix)
        guard_prefix = file_path.directory ().string ();

      if (guard_prefix)
        guard_prefix += '_';

      // DEP
      //
      if (gen_dep)
      {
        NarrowString target;

        NarrowStrings const& ts (ops.dep_target ());

        if (!ts.empty ())
        {
          for (NarrowStrings::const_iterator i (ts.begin ());
               i != ts.end (); ++i)
            target += (target.empty () ? "" : " \\\n") + *i;
        }
        else
        {
          target = hxx_path.string ();

          if (forward)
            target += " \\\n" + fwd_path.string ();

          if (inline_)
            target += " \\\n" + ixx_path.string ();

          for (Paths::iterator i (cxx_paths.begin ());
               i != cxx_paths.end (); ++i)
            target += " \\\n" + i->string ();

          target += " \\\n" + dep_path.string ();
        }

        dep << target.c_str () << ':';

        XSDFrontend::Generators::Dependencies gen;
        Paths prq (gen.generate (schema, file_path));

        for (Paths::iterator i (prq.begin ()); i != prq.end (); ++i)
          dep << " \\" << endl << " " << *i;

        dep << endl;

        // If requested, generate phony rules for the included/imported
        // schemas but not for the main file, which is the first in the
        // list.
        //
        if (ops.dep_phony () && prq.size () > 1)
        {
          for (Paths::iterator i (prq.begin () + 1); i != prq.end (); ++i)
            dep << endl
                << *i << ':' << endl;
        }
      }

      // FWD
      //
      if (gen_cxx && forward)
      {
        Context ctx (fwd, schema, file_path, ops, counts,
                     generate_xml_schema,
                     &string_literal_map,
                     &fwd_expr,
                     &hxx_expr,
                     &ixx_expr);

        sloc_filter sloc (fwd);

        // Guard
        //
        String guard (guard_expr.replace (guard_prefix + fwd_name));
        guard = ctx.escape (guard); // Make it a valid C++ identifier.
        std::transform (guard.begin (), guard.end (), guard.begin (), upcase);

        fwd << "#ifndef " << guard << endl
            << "#define " << guard << endl
            << endl;

        if (ctx.std >= cxx_version::cxx11)
        {
          fwd << "#ifndef XSD_CXX11" << endl
              << "#define XSD_CXX11" << endl
              << "#endif" << endl
              << endl;
        }

        if (ctx.char_type == L"char")
        {
          fwd << "#ifndef XSD_USE_CHAR" << endl
              << "#define XSD_USE_CHAR" << endl
              << "#endif" << endl
              << endl;

          fwd << "#ifndef XSD_CXX_TREE_USE_CHAR" << endl
              << "#define XSD_CXX_TREE_USE_CHAR" << endl
              << "#endif" << endl
              << endl;
        }
        else if (ctx.char_type == L"wchar_t")
        {
          fwd << "#ifndef XSD_USE_WCHAR" << endl
              << "#define XSD_USE_WCHAR" << endl
              << "#endif" << endl
              << endl;

          fwd << "#ifndef XSD_CXX_TREE_USE_WCHAR" << endl
              << "#define XSD_CXX_TREE_USE_WCHAR" << endl
              << "#endif" << endl
              << endl;
        }

        // Version check.
        //
        fwd << "#include " << endl
            << endl
            << "#if (XSD_INT_VERSION != " << XSD_INT_VERSION << "L)" << endl
            << "#error XSD runtime version mismatch" << endl
            << "#endif" << endl
            << endl;

        fwd << "#include " << endl
            << endl;

        // Copy prologue.
        //
        fwd << "// Begin prologue." << endl
            << "//" << endl;

        append (fwd, ops.fwd_prologue (), ops.prologue ());
        append (fwd, ops.fwd_prologue_file (), prologue);

        fwd << "//" << endl
            << "// End prologue." << endl
            << endl;
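
        // From this point on every generated file follows the same overall
        // layout: the "Begin prologue" ... "End prologue" block above, the
        // generated code proper, and a matching "Begin epilogue" ...
        // "End epilogue" block at the end.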
        // Generate.
        //
        {
          ind_filter ind (fwd); // We don't want to indent prologues/epilogues.
          generate_forward (ctx);
        }

        // Copy epilogue.
        //
        fwd << "// Begin epilogue." << endl
            << "//" << endl;

        append (fwd, ops.fwd_epilogue_file (), epilogue);
        append (fwd, ops.fwd_epilogue (), ops.epilogue ());

        fwd << "//" << endl
            << "// End epilogue." << endl
            << endl;

        fwd << "#include " << endl
            << endl;

        fwd << "#endif // " << guard << endl;

        if (show_sloc)
          wcerr << fwd_path << ": " << sloc.stream ().count () << endl;

        sloc_total += sloc.stream ().count ();
      }

      // HXX
      //
      if (gen_cxx && header)
      {
        Context ctx (hxx, schema, file_path, ops, counts,
                     generate_xml_schema,
                     &string_literal_map,
                     &fwd_expr,
                     &hxx_expr,
                     &ixx_expr);

        sloc_filter sloc (hxx);

        // Guard
        //
        String guard (guard_expr.replace (guard_prefix + hxx_name));
        guard = ctx.escape (guard); // Make it a valid C++ identifier.
        std::transform (guard.begin (), guard.end (), guard.begin (), upcase);

        hxx << "#ifndef " << guard << endl
            << "#define " << guard << endl
            << endl;

        if (!forward)
        {
          if (ctx.std >= cxx_version::cxx11)
          {
            hxx << "#ifndef XSD_CXX11" << endl
                << "#define XSD_CXX11" << endl
                << "#endif" << endl
                << endl;
          }

          if (ctx.char_type == L"char")
          {
            hxx << "#ifndef XSD_USE_CHAR" << endl
                << "#define XSD_USE_CHAR" << endl
                << "#endif" << endl
                << endl;

            hxx << "#ifndef XSD_CXX_TREE_USE_CHAR" << endl
                << "#define XSD_CXX_TREE_USE_CHAR" << endl
                << "#endif" << endl
                << endl;
          }
          else if (ctx.char_type == L"wchar_t")
          {
            hxx << "#ifndef XSD_USE_WCHAR" << endl
                << "#define XSD_USE_WCHAR" << endl
                << "#endif" << endl
                << endl;

            hxx << "#ifndef XSD_CXX_TREE_USE_WCHAR" << endl
                << "#define XSD_CXX_TREE_USE_WCHAR" << endl
                << "#endif" << endl
                << endl;
          }
        }

        // Version check.
        //
        hxx << "#include " << endl
            << endl
            << "#if (XSD_INT_VERSION != " << XSD_INT_VERSION << "L)" << endl
            << "#error XSD runtime version mismatch" << endl
            << "#endif" << endl
            << endl;

        hxx << "#include " << endl
            << endl;

        // Copy prologue.
        //
        hxx << "// Begin prologue." << endl
            << "//" << endl;

        append (hxx, ops.hxx_prologue (), ops.prologue ());
        append (hxx, ops.hxx_prologue_file (), prologue);

        hxx << "//" << endl
            << "// End prologue." << endl
            << endl;

        // Generate.
        //
        {
          ind_filter ind (hxx); // We don't want to indent prologues/epilogues.

          if (!generate_xml_schema)
          {
            if (forward)
              hxx << "#include " << ctx.process_include_path (fwd_name)
                  << endl << endl;
            else
              generate_forward (ctx);
          }

          generate_tree_header (ctx);

          if (!generate_xml_schema)
          {
            if (ops.generate_ostream ())
              generate_stream_header (ctx);

            if (!ops.generate_element_type () && !ops.suppress_parsing ())
              generate_parser_header (ctx);

            if (ops.generate_serialization ())
              generate_serialization_header (ctx);

            if (!ops.generate_insertion ().empty ())
              generate_stream_insertion_header (ctx);
          }
        }
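
        // The generated header pulls in the inline file below unless the
        // user defines XSD_DONT_INCLUDE_INLINE when compiling.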
        if (inline_)
        {
          hxx << "#ifndef XSD_DONT_INCLUDE_INLINE" << endl
              << "#include " << ctx.process_include_path (ixx_name) << endl
              << "#endif // XSD_DONT_INCLUDE_INLINE" << endl
              << endl;
        }

        // Copy epilogue.
        //
        hxx << "// Begin epilogue." << endl
            << "//" << endl;

        append (hxx, ops.hxx_epilogue_file (), epilogue);
        append (hxx, ops.hxx_epilogue (), ops.epilogue ());

        hxx << "//" << endl
            << "// End epilogue." << endl
            << endl;

        hxx << "#include " << endl
            << endl;

        hxx << "#endif // " << guard << endl;

        if (show_sloc)
          wcerr << hxx_path << ": " << sloc.stream ().count () << endl;

        sloc_total += sloc.stream ().count ();
      }

      // IXX
      //
      if (gen_cxx && inline_)
      {
        Context ctx (ixx, schema, file_path, ops, counts,
                     generate_xml_schema,
                     &string_literal_map,
                     &fwd_expr,
                     &hxx_expr,
                     &ixx_expr);

        sloc_filter sloc (ixx);

        // Guard
        //
        String guard (guard_expr.replace (guard_prefix + ixx_name));
        guard = ctx.escape (guard); // Make it a valid C++ identifier.
        std::transform (guard.begin (), guard.end (), guard.begin (), upcase);

        ixx << "#ifndef " << guard.c_str () << endl
            << "#define " << guard.c_str () << endl
            << endl;

        // Copy prologue.
        //
        ixx << "// Begin prologue." << endl
            << "//" << endl;

        append (ixx, ops.ixx_prologue (), ops.prologue ());
        append (ixx, ops.ixx_prologue_file (), prologue);

        ixx << "//" << endl
            << "// End prologue." << endl
            << endl;

        // Generate.
        //
        {
          ind_filter ind (ixx); // We don't want to indent prologues/epilogues.
          generate_tree_inline (ctx, 1, 0);
        }

        // Copy epilogue.
        //
        ixx << "// Begin epilogue." << endl
            << "//" << endl;

        append (ixx, ops.ixx_epilogue_file (), epilogue);
        append (ixx, ops.ixx_epilogue (), ops.epilogue ());

        ixx << "//" << endl
            << "// End epilogue." << endl
            << endl;

        ixx << "#endif // " << guard.c_str () << endl;

        if (show_sloc)
          wcerr << ixx_path << ": " << sloc.stream ().count () << endl;

        sloc_total += sloc.stream ().count ();
      }

      // CXX
      //
      if (gen_cxx && source)
      {
        size_t first_unit (0); // First unit in the current part.

        for (size_t part (0); part < parts; ++part)
        {
          // Figure out the range of units for this part.
          //
          size_t last_unit (first_unit);

          if (units != 0)
          {
            size_t complexity (counts.complexity[last_unit]);

            while (complexity < complexity_per_part)
            {
              // Make sure there will be at least one unit left for each
              // of the remaining parts.
              //
              if ((last_unit + 1) >= units ||
                  (units - (last_unit + 1) - 1) < (parts - part - 1))
                break;

              // Check whether the increase in complexity should be kept
              // in this part or moved to the next.
              //
              size_t new_complexity (
                complexity + counts.complexity[last_unit + 1]);

              if (new_complexity > complexity_per_part)
              {
                if ((new_complexity - complexity_per_part) >
                    (counts.complexity[last_unit + 1] / 2))
                  break;
              }

              last_unit++;
              complexity = new_complexity;
            }

            if (part + 1 == parts)
            {
              // Last part.
              //
              last_unit = units - 1;
            }
          }

          //
          //
          size_t first (first_unit);
          size_t last (last_unit);
          first_unit = last_unit + 1;

          //wcerr << "[" << first << ", " << last << "]: " << complexity
          //      << endl;

          WideOutputFileStream& os (*cxx[part]);

          Context ctx (os, schema, file_path, ops, counts,
                       generate_xml_schema,
                       &string_literal_map,
                       &fwd_expr,
                       &hxx_expr,
                       &ixx_expr);

          sloc_filter sloc (os);

          os << "#include " << endl
             << endl;

          // Copy prologue.
          //
          os << "// Begin prologue." << endl
             << "//" << endl;

          append (os, ops.cxx_prologue (), ops.prologue ());
          append (os, ops.cxx_prologue_file (), prologue);

          os << "//" << endl
             << "// End prologue." << endl
             << endl;

          os << "#include " << ctx.process_include_path (hxx_name) << endl
             << endl;
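
          // This part now receives the out-of-line code for its [first,
          // last] range of units: inline functions (only when no separate
          // inline file is generated), the tree definitions, and the
          // optional ostream, parser, serialization, and stream
          // insertion/extraction code selected by the corresponding
          // options.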
          // Generate.
          //
          {
            // We don't want to indent prologues/epilogues.
            //
            ind_filter ind (os);

            if (!inline_)
              generate_tree_inline (ctx, first, last);

            generate_tree_source (ctx, first, last);

            if (ops.generate_ostream ())
              generate_stream_source (ctx, first, last);

            if (!ops.generate_element_type () && !ops.suppress_parsing ())
              generate_parser_source (ctx, first, last);

            if (ops.generate_serialization ())
              generate_serialization_source (ctx, first, last);

            if (!ops.generate_extraction ().empty ())
              generate_stream_extraction_source (ctx);

            if (!ops.generate_insertion ().empty ())
              generate_stream_insertion_source (ctx);
          }

          // Copy epilogue.
          //
          os << "// Begin epilogue." << endl
             << "//" << endl;

          append (os, ops.cxx_epilogue_file (), epilogue);
          append (os, ops.cxx_epilogue (), ops.epilogue ());

          os << "//" << endl
             << "// End epilogue." << endl
             << endl;

          os << "#include " << endl
             << endl;

          if (show_sloc)
            wcerr << cxx_paths[part] << ": " << sloc.stream ().count ()
                  << endl;

          sloc_total += sloc.stream ().count ();
        }
      }

      return sloc_total;
    }
    catch (UnrepresentableCharacter const& e)
    {
      wcerr << "error: character at position " << e.position () << " "
            << "in string '" << e.string () << "' is unrepresentable in "
            << "the target encoding" << endl;

      wcerr << "info: use the --custom-literals option to provide custom "
            << "string literals mapping" << endl;

      throw Failed ();
    }
    catch (NoNamespaceMapping const& e)
    {
      wcerr << e.file () << ":" << e.line () << ":" << e.column ()
            << ": error: unable to map XML Schema namespace '" << e.ns ()
            << "' to C++ namespace" << endl;

      wcerr << e.file () << ":" << e.line () << ":" << e.column ()
            << ": info: use the --namespace-map or --namespace-regex option "
            << "to provide custom mapping" << endl;

      throw Failed ();
    }
    catch (InvalidNamespaceMapping const& e)
    {
      wcerr << "error: invalid XML to C++ namespace mapping specified: "
            << "'" << e.mapping () << "': " << e.reason () << endl;

      throw Failed ();
    }
    catch (InvalidCustomTypeMapping const& e)
    {
      wcerr << "error: invalid custom type mapping specified: "
            << "'" << e.mapping () << "': " << e.reason () << endl;

      throw Failed ();
    }
    catch (cutl::re::format const& e)
    {
      wcerr << "error: invalid regex: '" << e.regex ().c_str () << "': "
            << e.description ().c_str () << endl;

      throw Failed ();
    }
    catch (cutl::re::wformat const& e)
    {
      wcerr << "error: invalid regex: '" << e.regex () << "': "
            << e.description ().c_str () << endl;

      throw Failed ();
    }
  }
}