/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

/***/ 913:
/***/ ((module) => {

var replacements = [
  [/\*/g, '\\*', 'asterisks'],
  [/#/g, '\\#', 'number signs'],
  [/\//g, '\\/', 'slashes'],
  [/\(/g, '\\(', 'parentheses'],
  [/\)/g, '\\)', 'parentheses'],
  [/\[/g, '\\[', 'square brackets'],
  [/\]/g, '\\]', 'square brackets'],
  [/</g, '&lt;', 'angle brackets'],
  [/>/g, '&gt;', 'angle brackets'],
  [/_/g, '\\_', 'underscores'],
  [/`/g, '\\`', 'codeblocks']
]

module.exports = function (string, skips) {
  skips = skips || []
  return replacements.reduce(function (string, replacement) {
    var name = replacement[2]
    return name && skips.indexOf(name) !== -1
      ? string
      : string.replace(replacement[0], replacement[1])
  }, string)
}


/***/ }),
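// Illustrative note (added; not part of the original bundle): module 913 is the
// markdown-escape helper. A minimal usage sketch, assuming it is resolved
// through this bundle's own require shim:
//
//   var escape = __nccwpck_require__(913)
//   escape('a_b*c')                 // -> "a\_b\*c"
//   escape('a_b', ['underscores'])  // skip underscores -> "a_b"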

/***/ 43:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

;(function (sax) { // wrapper for non-node envs
  sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
  sax.SAXParser = SAXParser
  sax.SAXStream = SAXStream
  sax.createStream = createStream

  // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
  // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
  // since that's the earliest that a buffer overrun could occur. This way, checks are
  // as rare as required, but as often as necessary to ensure never crossing this bound.
  // Furthermore, buffers are only tested at most once per write(), so passing a very
  // large string into write() might have undesirable effects, but this is manageable by
  // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
  // edge case, result in creating at most one complete copy of the string passed in.
  // Set to Infinity to have unlimited buffers.
  sax.MAX_BUFFER_LENGTH = 64 * 1024
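  // Worked example (comment added for clarity): with MAX_BUFFER_LENGTH = 65536,
  // if the largest internal buffer holds 10000 characters when a check runs at
  // stream position P, checkBufferLength() schedules the next check at
  // P + (65536 - 10000), the earliest position at which that buffer could overflow.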

  var buffers = [
    'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
    'procInstName', 'procInstBody', 'entity', 'attribName',
    'attribValue', 'cdata', 'script'
  ]

  sax.EVENTS = [
    'text',
    'processinginstruction',
    'sgmldeclaration',
    'doctype',
    'comment',
    'opentagstart',
    'attribute',
    'opentag',
    'closetag',
    'opencdata',
    'cdata',
    'closecdata',
    'error',
    'end',
    'ready',
    'script',
    'opennamespace',
    'closenamespace'
  ]

  function SAXParser (strict, opt) {
    if (!(this instanceof SAXParser)) {
      return new SAXParser(strict, opt)
    }

    var parser = this
    clearBuffers(parser)
    parser.q = parser.c = ''
    parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
    parser.opt = opt || {}
    parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
    parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
    parser.tags = []
    parser.closed = parser.closedRoot = parser.sawRoot = false
    parser.tag = parser.error = null
    parser.strict = !!strict
    parser.noscript = !!(strict || parser.opt.noscript)
    parser.state = S.BEGIN
    parser.strictEntities = parser.opt.strictEntities
    parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
    parser.attribList = []

    // namespaces form a prototype chain.
    // it always points at the current tag,
    // which protos to its parent tag.
    if (parser.opt.xmlns) {
      parser.ns = Object.create(rootNS)
    }

    // mostly just for error reporting
    parser.trackPosition = parser.opt.position !== false
    if (parser.trackPosition) {
      parser.position = parser.line = parser.column = 0
    }
    emit(parser, 'onready')
  }

  if (!Object.create) {
    Object.create = function (o) {
      function F () {}
      F.prototype = o
      var newf = new F()
      return newf
    }
  }

  if (!Object.keys) {
    Object.keys = function (o) {
      var a = []
      for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
      return a
    }
  }

  function checkBufferLength (parser) {
    var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
    var maxActual = 0
    for (var i = 0, l = buffers.length; i < l; i++) {
      var len = parser[buffers[i]].length
      if (len > maxAllowed) {
        // Text/cdata nodes can get big, and since they're buffered,
        // we can get here under normal conditions.
        // Avoid issues by emitting the text node now,
        // so at least it won't get any bigger.
        switch (buffers[i]) {
          case 'textNode':
            closeText(parser)
            break

          case 'cdata':
            emitNode(parser, 'oncdata', parser.cdata)
            parser.cdata = ''
            break

          case 'script':
            emitNode(parser, 'onscript', parser.script)
            parser.script = ''
            break

          default:
            error(parser, 'Max buffer length exceeded: ' + buffers[i])
        }
      }
      maxActual = Math.max(maxActual, len)
    }
    // schedule the next check for the earliest possible buffer overrun.
    var m = sax.MAX_BUFFER_LENGTH - maxActual
    parser.bufferCheckPosition = m + parser.position
  }

  function clearBuffers (parser) {
    for (var i = 0, l = buffers.length; i < l; i++) {
      parser[buffers[i]] = ''
    }
  }

  function flushBuffers (parser) {
    closeText(parser)
    if (parser.cdata !== '') {
      emitNode(parser, 'oncdata', parser.cdata)
      parser.cdata = ''
    }
    if (parser.script !== '') {
      emitNode(parser, 'onscript', parser.script)
      parser.script = ''
    }
  }

  SAXParser.prototype = {
    end: function () { end(this) },
    write: write,
    resume: function () { this.error = null; return this },
    close: function () { return this.write(null) },
    flush: function () { flushBuffers(this) }
  }
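  // Minimal usage sketch (added for orientation; not part of the upstream sax
  // source). Handler names follow sax's on<event> convention from sax.EVENTS:
  //
  //   var parser = sax.parser(true)                      // strict mode
  //   parser.onopentag = function (node) { /* node.name, node.attributes */ }
  //   parser.ontext = function (text) { /* character data */ }
  //   parser.onerror = function (err) { parser.resume() } // clear error, keep going
  //   parser.write('<greeting>hello</greeting>').close()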

  var Stream
  try {
    Stream = (__nccwpck_require__(781).Stream)
  } catch (ex) {
    Stream = function () {}
  }

  var streamWraps = sax.EVENTS.filter(function (ev) {
    return ev !== 'error' && ev !== 'end'
  })

  function createStream (strict, opt) {
    return new SAXStream(strict, opt)
  }

  function SAXStream (strict, opt) {
    if (!(this instanceof SAXStream)) {
      return new SAXStream(strict, opt)
    }

    Stream.apply(this)

    this._parser = new SAXParser(strict, opt)
    this.writable = true
    this.readable = true

    var me = this

    this._parser.onend = function () {
      me.emit('end')
    }

    this._parser.onerror = function (er) {
      me.emit('error', er)

      // if didn't throw, then means error was handled.
      // go ahead and clear error, so we can write again.
      me._parser.error = null
    }

    this._decoder = null

    streamWraps.forEach(function (ev) {
      Object.defineProperty(me, 'on' + ev, {
        get: function () {
          return me._parser['on' + ev]
        },
        set: function (h) {
          if (!h) {
            me.removeAllListeners(ev)
            me._parser['on' + ev] = h
            return h
          }
          me.on(ev, h)
        },
        enumerable: true,
        configurable: false
      })
    })
  }

  SAXStream.prototype = Object.create(Stream.prototype, {
    constructor: {
      value: SAXStream
    }
  })

  SAXStream.prototype.write = function (data) {
    if (typeof Buffer === 'function' &&
      typeof Buffer.isBuffer === 'function' &&
      Buffer.isBuffer(data)) {
      if (!this._decoder) {
        var SD = (__nccwpck_require__(576).StringDecoder)
        this._decoder = new SD('utf8')
      }
      data = this._decoder.write(data)
    }

    this._parser.write(data.toString())
    this.emit('data', data)
    return true
  }

  SAXStream.prototype.end = function (chunk) {
    if (chunk && chunk.length) {
      this.write(chunk)
    }
    this._parser.end()
    return true
  }

  SAXStream.prototype.on = function (ev, handler) {
    var me = this
    if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
      me._parser['on' + ev] = function () {
        var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
        args.splice(0, 0, ev)
        me.emit.apply(me, args)
      }
    }

    return Stream.prototype.on.call(me, ev, handler)
  }

  // this really needs to be replaced with character classes.
  // XML allows all manner of ridiculous numbers and digits.
  var CDATA = '[CDATA['
  var DOCTYPE = 'DOCTYPE'
  var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
  var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
  var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }

  // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
  // This implementation works on strings, a single character at a time
  // as such, it cannot ever support astral-plane characters (10000-EFFFF)
  // without a significant breaking change to either this parser, or the
  // JavaScript language. Implementation of an emoji-capable xml parser
  // is left as an exercise for the reader.
  var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/

  var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/

  var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
  var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/

  function isWhitespace (c) {
    return c === ' ' || c === '\n' || c === '\r' || c === '\t'
  }

  function isQuote (c) {
    return c === '"' || c === '\''
  }

  function isAttribEnd (c) {
    return c === '>' || isWhitespace(c)
  }

  function isMatch (regex, c) {
    return regex.test(c)
  }

  function notMatch (regex, c) {
    return !isMatch(regex, c)
  }

  var S = 0
  sax.STATE = {
    BEGIN: S++, // leading byte order mark or whitespace
    BEGIN_WHITESPACE: S++, // leading whitespace
    TEXT: S++, // general stuff
    TEXT_ENTITY: S++, // & and such.
    OPEN_WAKA: S++, // <
    SGML_DECL: S++, // <!BLARG
    SGML_DECL_QUOTED: S++, // <!BLARG foo "bar
    DOCTYPE: S++, // <!DOCTYPE
    DOCTYPE_QUOTED: S++, // <!DOCTYPE "//blah
    DOCTYPE_DTD: S++, // <!DOCTYPE "//blah" [ ...
    DOCTYPE_DTD_QUOTED: S++, // <!DOCTYPE "//blah" [ "foo
    COMMENT_STARTING: S++, // <!-
    COMMENT: S++, // <!--
    COMMENT_ENDING: S++, // <!-- blah -
    COMMENT_ENDED: S++, // <!-- blah --
    CDATA: S++, // <![CDATA[ something
    CDATA_ENDING: S++, // ]
    CDATA_ENDING_2: S++, // ]]
    PROC_INST: S++, // <?hi
    PROC_INST_BODY: S++, // <?hi there
    PROC_INST_ENDING: S++, // <?hi "there" ?
    OPEN_TAG: S++, // <strong
    OPEN_TAG_SLASH: S++, // <strong /
    ATTRIB: S++, // <a
    ATTRIB_NAME: S++, // <a foo
    ATTRIB_NAME_SAW_WHITE: S++, // <a foo _
    ATTRIB_VALUE: S++, // <a foo=
    ATTRIB_VALUE_QUOTED: S++, // <a foo="bar
    ATTRIB_VALUE_CLOSED: S++, // <a foo="bar"
    ATTRIB_VALUE_UNQUOTED: S++, // <a foo=bar
    ATTRIB_VALUE_ENTITY_Q: S++, // <foo bar="&quot;"
    ATTRIB_VALUE_ENTITY_U: S++, // <foo bar=&quot
    CLOSE_TAG: S++, // </a
    CLOSE_TAG_SAW_WHITE: S++, // </a >
    SCRIPT: S++, // <script> ...
    SCRIPT_ENDING: S++ // <script> ... <
  }

  sax.XML_ENTITIES = {
    'amp': '&',
    'gt': '>',
    'lt': '<',
    'quot': '"',
    'apos': "'"
  }

  sax.ENTITIES = {
    'amp': '&',
    'gt': '>',
    'lt': '<',
    'quot': '"',
    'apos': "'",
    'AElig': 198,
    'Aacute': 193,
    'Acirc': 194,
    'Agrave': 192,
    'Aring': 197,
    'Atilde': 195,
    'Auml': 196,
    'Ccedil': 199,
    'ETH': 208,
    'Eacute': 201,
    'Ecirc': 202,
    'Egrave': 200,
    'Euml': 203,
    'Iacute': 205,
    'Icirc': 206,
    'Igrave': 204,
    'Iuml': 207,
    'Ntilde': 209,
    'Oacute': 211,
    'Ocirc': 212,
    'Ograve': 210,
    'Oslash': 216,
    'Otilde': 213,
    'Ouml': 214,
    'THORN': 222,
    'Uacute': 218,
    'Ucirc': 219,
    'Ugrave': 217,
    'Uuml': 220,
    'Yacute': 221,
    'aacute': 225,
    'acirc': 226,
    'aelig': 230,
    'agrave': 224,
    'aring': 229,
    'atilde': 227,
    'auml': 228,
    'ccedil': 231,
    'eacute': 233,
    'ecirc': 234,
    'egrave': 232,
    'eth': 240,
    'euml': 235,
    'iacute': 237,
    'icirc': 238,
    'igrave': 236,
    'iuml': 239,
    'ntilde': 241,
    'oacute': 243,
    'ocirc': 244,
    'ograve': 242,
    'oslash': 248,
    'otilde': 245,
    'ouml': 246,
    'szlig': 223,
    'thorn': 254,
    'uacute': 250,
    'ucirc': 251,
    'ugrave': 249,
    'uuml': 252,
    'yacute': 253,
    'yuml': 255,
    'copy': 169,
    'reg': 174,
    'nbsp': 160,
    'iexcl': 161,
    'cent': 162,
    'pound': 163,
    'curren': 164,
    'yen': 165,
    'brvbar': 166,
    'sect': 167,
    'uml': 168,
    'ordf': 170,
    'laquo': 171,
    'not': 172,
    'shy': 173,
    'macr': 175,
    'deg': 176,
    'plusmn': 177,
    'sup1': 185,
    'sup2': 178,
    'sup3': 179,
    'acute': 180,
    'micro': 181,
    'para': 182,
    'middot': 183,
    'cedil': 184,
    'ordm': 186,
    'raquo': 187,
    'frac14': 188,
    'frac12': 189,
    'frac34': 190,
    'iquest': 191,
    'times': 215,
    'divide': 247,
    'OElig': 338,
    'oelig': 339,
    'Scaron': 352,
    'scaron': 353,
    'Yuml': 376,
    'fnof': 402,
    'circ': 710,
    'tilde': 732,
    'Alpha': 913,
    'Beta': 914,
    'Gamma': 915,
    'Delta': 916,
    'Epsilon': 917,
    'Zeta': 918,
    'Eta': 919,
    'Theta': 920,
    'Iota': 921,
    'Kappa': 922,
    'Lambda': 923,
    'Mu': 924,
    'Nu': 925,
    'Xi': 926,
    'Omicron': 927,
    'Pi': 928,
    'Rho': 929,
    'Sigma': 931,
    'Tau': 932,
    'Upsilon': 933,
    'Phi': 934,
    'Chi': 935,
    'Psi': 936,
    'Omega': 937,
    'alpha': 945,
    'beta': 946,
    'gamma': 947,
    'delta': 948,
    'epsilon': 949,
    'zeta': 950,
    'eta': 951,
    'theta': 952,
    'iota': 953,
    'kappa': 954,
    'lambda': 955,
    'mu': 956,
    'nu': 957,
    'xi': 958,
    'omicron': 959,
    'pi': 960,
    'rho': 961,
    'sigmaf': 962,
    'sigma': 963,
    'tau': 964,
    'upsilon': 965,
    'phi': 966,
    'chi': 967,
    'psi': 968,
    'omega': 969,
    'thetasym': 977,
    'upsih': 978,
    'piv': 982,
    'ensp': 8194,
    'emsp': 8195,
    'thinsp': 8201,
    'zwnj': 8204,
    'zwj': 8205,
    'lrm': 8206,
    'rlm': 8207,
    'ndash': 8211,
    'mdash': 8212,
    'lsquo': 8216,
    'rsquo': 8217,
    'sbquo': 8218,
    'ldquo': 8220,
    'rdquo': 8221,
    'bdquo': 8222,
    'dagger': 8224,
    'Dagger': 8225,
    'bull': 8226,
    'hellip': 8230,
    'permil': 8240,
    'prime': 8242,
    'Prime': 8243,
    'lsaquo': 8249,
    'rsaquo': 8250,
    'oline': 8254,
    'frasl': 8260,
    'euro': 8364,
    'image': 8465,
    'weierp': 8472,
    'real': 8476,
    'trade': 8482,
    'alefsym': 8501,
    'larr': 8592,
    'uarr': 8593,
    'rarr': 8594,
    'darr': 8595,
    'harr': 8596,
    'crarr': 8629,
    'lArr': 8656,
    'uArr': 8657,
    'rArr': 8658,
    'dArr': 8659,
    'hArr': 8660,
    'forall': 8704,
    'part': 8706,
    'exist': 8707,
    'empty': 8709,
    'nabla': 8711,
    'isin': 8712,
    'notin': 8713,
    'ni': 8715,
    'prod': 8719,
    'sum': 8721,
    'minus': 8722,
    'lowast': 8727,
    'radic': 8730,
    'prop': 8733,
    'infin': 8734,
    'ang': 8736,
    'and': 8743,
    'or': 8744,
    'cap': 8745,
    'cup': 8746,
    'int': 8747,
    'there4': 8756,
    'sim': 8764,
    'cong': 8773,
    'asymp': 8776,
    'ne': 8800,
    'equiv': 8801,
    'le': 8804,
    'ge': 8805,
    'sub': 8834,
    'sup': 8835,
    'nsub': 8836,
    'sube': 8838,
    'supe': 8839,
    'oplus': 8853,
    'otimes': 8855,
    'perp': 8869,
    'sdot': 8901,
    'lceil': 8968,
    'rceil': 8969,
    'lfloor': 8970,
    'rfloor': 8971,
    'lang': 9001,
    'rang': 9002,
    'loz': 9674,
    'spades': 9824,
    'clubs': 9827,
    'hearts': 9829,
    'diams': 9830
  }

  Object.keys(sax.ENTITIES).forEach(function (key) {
    var e = sax.ENTITIES[key]
    var s = typeof e === 'number' ? String.fromCharCode(e) : e
    sax.ENTITIES[key] = s
  })

  for (var s in sax.STATE) {
    sax.STATE[sax.STATE[s]] = s
  }

  // shorthand
  S = sax.STATE

  function emit (parser, event, data) {
    parser[event] && parser[event](data)
  }

  function emitNode (parser, nodeType, data) {
    if (parser.textNode) closeText(parser)
    emit(parser, nodeType, data)
  }

  function closeText (parser) {
    parser.textNode = textopts(parser.opt, parser.textNode)
    if (parser.textNode) emit(parser, 'ontext', parser.textNode)
    parser.textNode = ''
  }

  function textopts (opt, text) {
    if (opt.trim) text = text.trim()
    if (opt.normalize) text = text.replace(/\s+/g, ' ')
    return text
  }

  function error (parser, er) {
    closeText(parser)
    if (parser.trackPosition) {
      er += '\nLine: ' + parser.line +
        '\nColumn: ' + parser.column +
        '\nChar: ' + parser.c
    }
    er = new Error(er)
    parser.error = er
    emit(parser, 'onerror', er)
    return parser
  }

  function end (parser) {
    if (parser.sawRoot && !parser.closedRoot) strictFail(parser, 'Unclosed root tag')
    if ((parser.state !== S.BEGIN) &&
      (parser.state !== S.BEGIN_WHITESPACE) &&
      (parser.state !== S.TEXT)) {
      error(parser, 'Unexpected end')
    }
    closeText(parser)
    parser.c = ''
    parser.closed = true
    emit(parser, 'onend')
    SAXParser.call(parser, parser.strict, parser.opt)
    return parser
  }

  function strictFail (parser, message) {
    if (typeof parser !== 'object' || !(parser instanceof SAXParser)) {
      throw new Error('bad call to strictFail')
    }
    if (parser.strict) {
      error(parser, message)
    }
  }

  function newTag (parser) {
    if (!parser.strict) parser.tagName = parser.tagName[parser.looseCase]()
    var parent = parser.tags[parser.tags.length - 1] || parser
    var tag = parser.tag = { name: parser.tagName, attributes: {} }

    // will be overridden if tag contains an xmlns="foo" or xmlns:foo="bar"
    if (parser.opt.xmlns) {
      tag.ns = parent.ns
    }
    parser.attribList.length = 0
    emitNode(parser, 'onopentagstart', tag)
  }

  function qname (name, attribute) {
    var i = name.indexOf(':')
    var qualName = i < 0 ? [ '', name ] : name.split(':')
    var prefix = qualName[0]
    var local = qualName[1]

    // <x "xmlns"="http://foo">
    if (attribute && name === 'xmlns') {
      prefix = 'xmlns'
      local = ''
    }

    return { prefix: prefix, local: local }
  }

  function attrib (parser) {
    if (!parser.strict) {
      parser.attribName = parser.attribName[parser.looseCase]()
    }

    if (parser.attribList.indexOf(parser.attribName) !== -1 ||
      parser.tag.attributes.hasOwnProperty(parser.attribName)) {
      parser.attribName = parser.attribValue = ''
      return
    }

    if (parser.opt.xmlns) {
      var qn = qname(parser.attribName, true)
      var prefix = qn.prefix
      var local = qn.local

      if (prefix === 'xmlns') {
        // namespace binding attribute. push the binding into scope
        if (local === 'xml' && parser.attribValue !== XML_NAMESPACE) {
          strictFail(parser,
            'xml: prefix must be bound to ' + XML_NAMESPACE + '\n' +
            'Actual: ' + parser.attribValue)
        } else if (local === 'xmlns' && parser.attribValue !== XMLNS_NAMESPACE) {
          strictFail(parser,
            'xmlns: prefix must be bound to ' + XMLNS_NAMESPACE + '\n' +
            'Actual: ' + parser.attribValue)
        } else {
          var tag = parser.tag
          var parent = parser.tags[parser.tags.length - 1] || parser
          if (tag.ns === parent.ns) {
            tag.ns = Object.create(parent.ns)
          }
          tag.ns[local] = parser.attribValue
        }
      }

      // defer onattribute events until all attributes have been seen
      // so any new bindings can take effect. preserve attribute order
      // so deferred events can be emitted in document order
      parser.attribList.push([parser.attribName, parser.attribValue])
    } else {
      // in non-xmlns mode, we can emit the event right away
      parser.tag.attributes[parser.attribName] = parser.attribValue
      emitNode(parser, 'onattribute', {
        name: parser.attribName,
        value: parser.attribValue
      })
    }

    parser.attribName = parser.attribValue = ''
  }

  function openTag (parser, selfClosing) {
    if (parser.opt.xmlns) {
      // emit namespace binding events
      var tag = parser.tag

      // add namespace info to tag
      var qn = qname(parser.tagName)
      tag.prefix = qn.prefix
      tag.local = qn.local
      tag.uri = tag.ns[qn.prefix] || ''

      if (tag.prefix && !tag.uri) {
        strictFail(parser, 'Unbound namespace prefix: ' +
          JSON.stringify(parser.tagName))
        tag.uri = qn.prefix
      }

      var parent = parser.tags[parser.tags.length - 1] || parser
      if (tag.ns && parent.ns !== tag.ns) {
        Object.keys(tag.ns).forEach(function (p) {
          emitNode(parser, 'onopennamespace', {
            prefix: p,
            uri: tag.ns[p]
          })
        })
      }

      // handle deferred onattribute events
      // Note: do not apply default ns to attributes:
      // http://www.w3.org/TR/REC-xml-names/#defaulting
      for (var i = 0, l = parser.attribList.length; i < l; i++) {
        var nv = parser.attribList[i]
        var name = nv[0]
        var value = nv[1]
        var qualName = qname(name, true)
        var prefix = qualName.prefix
        var local = qualName.local
        var uri = prefix === '' ? '' : (tag.ns[prefix] || '')
        var a = {
          name: name,
          value: value,
          prefix: prefix,
          local: local,
          uri: uri
        }

        // if there's any attributes with an undefined namespace,
        // then fail on them now.
        if (prefix && prefix !== 'xmlns' && !uri) {
          strictFail(parser, 'Unbound namespace prefix: ' +
            JSON.stringify(prefix))
          a.uri = prefix
        }
        parser.tag.attributes[name] = a
        emitNode(parser, 'onattribute', a)
      }
      parser.attribList.length = 0
    }

    parser.tag.isSelfClosing = !!selfClosing

    // process the tag
    parser.sawRoot = true
    parser.tags.push(parser.tag)
    emitNode(parser, 'onopentag', parser.tag)
    if (!selfClosing) {
      // special case for <script> in non-strict mode.
      if (!parser.noscript && parser.tagName.toLowerCase() === 'script') {
        parser.state = S.SCRIPT
      } else {
        parser.state = S.TEXT
      }
      parser.tag = null
      parser.tagName = ''
    }
    parser.attribName = parser.attribValue = ''
    parser.attribList.length = 0
  }

  function closeTag (parser) {
    if (!parser.tagName) {
      strictFail(parser, 'Weird empty close tag.')
      parser.textNode += '</>'
      parser.state = S.TEXT
      return
    }

    if (parser.script) {
      if (parser.tagName !== 'script') {
        parser.script += '</' + parser.tagName + '>'
        parser.tagName = ''
        parser.state = S.SCRIPT
        return
      }
      emitNode(parser, 'onscript', parser.script)
      parser.script = ''
    }

    // first make sure that the closing tag actually exists.
    // <a><b></c></b></a> will close everything, otherwise.
    var t = parser.tags.length
    var tagName = parser.tagName
    if (!parser.strict) {
      tagName = tagName[parser.looseCase]()
    }
    var closeTo = tagName
    while (t--) {
      var close = parser.tags[t]
      if (close.name !== closeTo) {
        // fail the first time in strict mode
        strictFail(parser, 'Unexpected close tag')
      } else {
        break
      }
    }

    // didn't find it. we already failed for strict, so just abort.
    if (t < 0) {
      strictFail(parser, 'Unmatched closing tag: ' + parser.tagName)
      parser.textNode += '</' + parser.tagName + '>'
      parser.state = S.TEXT
      return
    }
    parser.tagName = tagName
    var s = parser.tags.length
    while (s-- > t) {
      var tag = parser.tag = parser.tags.pop()
      parser.tagName = parser.tag.name
      emitNode(parser, 'onclosetag', parser.tagName)

      var x = {}
      for (var i in tag.ns) {
        x[i] = tag.ns[i]
      }

      var parent = parser.tags[parser.tags.length - 1] || parser
      if (parser.opt.xmlns && tag.ns !== parent.ns) {
        // remove namespace bindings introduced by tag
        Object.keys(tag.ns).forEach(function (p) {
          var n = tag.ns[p]
          emitNode(parser, 'onclosenamespace', { prefix: p, uri: n })
        })
      }
    }
    if (t === 0) parser.closedRoot = true
    parser.tagName = parser.attribValue = parser.attribName = ''
    parser.attribList.length = 0
    parser.state = S.TEXT
  }

  function parseEntity (parser) {
    var entity = parser.entity
    var entityLC = entity.toLowerCase()
    var num
    var numStr = ''

    if (parser.ENTITIES[entity]) {
      return parser.ENTITIES[entity]
    }
    if (parser.ENTITIES[entityLC]) {
      return parser.ENTITIES[entityLC]
    }
    entity = entityLC
    if (entity.charAt(0) === '#') {
      if (entity.charAt(1) === 'x') {
        entity = entity.slice(2)
        num = parseInt(entity, 16)
        numStr = num.toString(16)
      } else {
        entity = entity.slice(1)
        num = parseInt(entity, 10)
        numStr = num.toString(10)
      }
    }
    entity = entity.replace(/^0+/, '')
    if (isNaN(num) || numStr.toLowerCase() !== entity) {
      strictFail(parser, 'Invalid character entity')
      return '&' + parser.entity + ';'
    }

    return String.fromCodePoint(num)
  }
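  // Example of the numeric-entity path above (comment added for clarity):
  // "&#72;" and "&#x48;" reach parseEntity with entity '#72' / '#x48', parse
  // to 72 / 0x48, and both return String.fromCodePoint(72) === 'H'. In
  // non-strict mode an unrecognized named entity is returned verbatim as '&name;'.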

  function beginWhiteSpace (parser, c) {
    if (c === '<') {
      parser.state = S.OPEN_WAKA
      parser.startTagPosition = parser.position
    } else if (!isWhitespace(c)) {
      // have to process this as a text node.
      // weird, but happens.
      strictFail(parser, 'Non-whitespace before first tag.')
      parser.textNode = c
      parser.state = S.TEXT
    }
  }

  function charAt (chunk, i) {
    var result = ''
    if (i < chunk.length) {
      result = chunk.charAt(i)
    }
    return result
  }

  function write (chunk) {
    var parser = this
    if (this.error) {
      throw this.error
    }
    if (parser.closed) {
      return error(parser,
        'Cannot write after close. Assign an onready handler.')
    }
    if (chunk === null) {
      return end(parser)
    }
    if (typeof chunk === 'object') {
      chunk = chunk.toString()
    }
    var i = 0
    var c = ''
    while (true) {
      c = charAt(chunk, i++)
      parser.c = c

      if (!c) {
        break
      }

      if (parser.trackPosition) {
        parser.position++
        if (c === '\n') {
          parser.line++
          parser.column = 0
        } else {
          parser.column++
        }
      }

      switch (parser.state) {
        case S.BEGIN:
          parser.state = S.BEGIN_WHITESPACE
          if (c === '\uFEFF') {
            continue
          }
          beginWhiteSpace(parser, c)
          continue

        case S.BEGIN_WHITESPACE:
          beginWhiteSpace(parser, c)
          continue

        case S.TEXT:
          if (parser.sawRoot && !parser.closedRoot) {
            var starti = i - 1
            while (c && c !== '<' && c !== '&') {
              c = charAt(chunk, i++)
              if (c && parser.trackPosition) {
                parser.position++
                if (c === '\n') {
                  parser.line++
                  parser.column = 0
                } else {
                  parser.column++
                }
              }
            }
            parser.textNode += chunk.substring(starti, i - 1)
          }
          if (c === '<' && !(parser.sawRoot && parser.closedRoot && !parser.strict)) {
            parser.state = S.OPEN_WAKA
            parser.startTagPosition = parser.position
          } else {
            if (!isWhitespace(c) && (!parser.sawRoot || parser.closedRoot)) {
              strictFail(parser, 'Text data outside of root node.')
            }
            if (c === '&') {
              parser.state = S.TEXT_ENTITY
            } else {
              parser.textNode += c
            }
          }
          continue

        case S.SCRIPT:
          // only non-strict
          if (c === '<') {
            parser.state = S.SCRIPT_ENDING
          } else {
            parser.script += c
          }
          continue

        case S.SCRIPT_ENDING:
          if (c === '/') {
            parser.state = S.CLOSE_TAG
          } else {
            parser.script += '<' + c
            parser.state = S.SCRIPT
          }
          continue

        case S.OPEN_WAKA:
          // either a /, ?, !, or text is coming next.
          if (c === '!') {
            parser.state = S.SGML_DECL
            parser.sgmlDecl = ''
          } else if (isWhitespace(c)) {
            // wait for it...
          } else if (isMatch(nameStart, c)) {
            parser.state = S.OPEN_TAG
            parser.tagName = c
          } else if (c === '/') {
            parser.state = S.CLOSE_TAG
            parser.tagName = ''
          } else if (c === '?') {
            parser.state = S.PROC_INST
            parser.procInstName = parser.procInstBody = ''
          } else {
            strictFail(parser, 'Unencoded <')
            // if there was some whitespace, then add that in.
            if (parser.startTagPosition + 1 < parser.position) {
              var pad = parser.position - parser.startTagPosition
              c = new Array(pad).join(' ') + c
            }
            parser.textNode += '<' + c
            parser.state = S.TEXT
          }
          continue

        case S.SGML_DECL:
          if ((parser.sgmlDecl + c).toUpperCase() === CDATA) {
            emitNode(parser, 'onopencdata')
            parser.state = S.CDATA
            parser.sgmlDecl = ''
            parser.cdata = ''
          } else if (parser.sgmlDecl + c === '--') {
            parser.state = S.COMMENT
            parser.comment = ''
            parser.sgmlDecl = ''
          } else if ((parser.sgmlDecl + c).toUpperCase() === DOCTYPE) {
            parser.state = S.DOCTYPE
            if (parser.doctype || parser.sawRoot) {
              strictFail(parser,
                'Inappropriately located doctype declaration')
            }
            parser.doctype = ''
            parser.sgmlDecl = ''
          } else if (c === '>') {
            emitNode(parser, 'onsgmldeclaration', parser.sgmlDecl)
            parser.sgmlDecl = ''
            parser.state = S.TEXT
          } else if (isQuote(c)) {
            parser.state = S.SGML_DECL_QUOTED
            parser.sgmlDecl += c
          } else {
            parser.sgmlDecl += c
          }
          continue

        case S.SGML_DECL_QUOTED:
          if (c === parser.q) {
            parser.state = S.SGML_DECL
            parser.q = ''
          }
          parser.sgmlDecl += c
          continue

        case S.DOCTYPE:
          if (c === '>') {
            parser.state = S.TEXT
            emitNode(parser, 'ondoctype', parser.doctype)
            parser.doctype = true // just remember that we saw it.
          } else {
            parser.doctype += c
            if (c === '[') {
              parser.state = S.DOCTYPE_DTD
            } else if (isQuote(c)) {
              parser.state = S.DOCTYPE_QUOTED
              parser.q = c
            }
          }
          continue

        case S.DOCTYPE_QUOTED:
          parser.doctype += c
          if (c === parser.q) {
            parser.q = ''
            parser.state = S.DOCTYPE
          }
          continue

        case S.DOCTYPE_DTD:
          parser.doctype += c
          if (c === ']') {
            parser.state = S.DOCTYPE
          } else if (isQuote(c)) {
            parser.state = S.DOCTYPE_DTD_QUOTED
            parser.q = c
          }
          continue

        case S.DOCTYPE_DTD_QUOTED:
          parser.doctype += c
          if (c === parser.q) {
            parser.state = S.DOCTYPE_DTD
            parser.q = ''
          }
          continue

        case S.COMMENT:
          if (c === '-') {
            parser.state = S.COMMENT_ENDING
          } else {
            parser.comment += c
          }
          continue

        case S.COMMENT_ENDING:
          if (c === '-') {
            parser.state = S.COMMENT_ENDED
            parser.comment = textopts(parser.opt, parser.comment)
            if (parser.comment) {
              emitNode(parser, 'oncomment', parser.comment)
            }
            parser.comment = ''
          } else {
            parser.comment += '-' + c
            parser.state = S.COMMENT
          }
          continue

        case S.COMMENT_ENDED:
          if (c !== '>') {
            strictFail(parser, 'Malformed comment')
            // allow <!-- blah -- bloo --> in non-strict mode,
            // which is a comment of " blah -- bloo "
            parser.comment += '--' + c
            parser.state = S.COMMENT
          } else {
            parser.state = S.TEXT
          }
          continue

        case S.CDATA:
          if (c === ']') {
            parser.state = S.CDATA_ENDING
          } else {
            parser.cdata += c
          }
          continue

        case S.CDATA_ENDING:
          if (c === ']') {
            parser.state = S.CDATA_ENDING_2
          } else {
            parser.cdata += ']' + c
            parser.state = S.CDATA
          }
          continue

        case S.CDATA_ENDING_2:
          if (c === '>') {
            if (parser.cdata) {
              emitNode(parser, 'oncdata', parser.cdata)
            }
            emitNode(parser, 'onclosecdata')
            parser.cdata = ''
            parser.state = S.TEXT
          } else if (c === ']') {
            parser.cdata += ']'
          } else {
            parser.cdata += ']]' + c
            parser.state = S.CDATA
          }
          continue

        case S.PROC_INST:
          if (c === '?') {
            parser.state = S.PROC_INST_ENDING
          } else if (isWhitespace(c)) {
            parser.state = S.PROC_INST_BODY
          } else {
            parser.procInstName += c
          }
          continue

        case S.PROC_INST_BODY:
          if (!parser.procInstBody && isWhitespace(c)) {
            continue
          } else if (c === '?') {
            parser.state = S.PROC_INST_ENDING
          } else {
            parser.procInstBody += c
          }
          continue

        case S.PROC_INST_ENDING:
          if (c === '>') {
            emitNode(parser, 'onprocessinginstruction', {
              name: parser.procInstName,
              body: parser.procInstBody
            })
            parser.procInstName = parser.procInstBody = ''
            parser.state = S.TEXT
          } else {
            parser.procInstBody += '?' + c
            parser.state = S.PROC_INST_BODY
          }
          continue

        case S.OPEN_TAG:
          if (isMatch(nameBody, c)) {
            parser.tagName += c
          } else {
            newTag(parser)
            if (c === '>') {
              openTag(parser)
            } else if (c === '/') {
              parser.state = S.OPEN_TAG_SLASH
            } else {
              if (!isWhitespace(c)) {
                strictFail(parser, 'Invalid character in tag name')
              }
              parser.state = S.ATTRIB
            }
          }
          continue

        case S.OPEN_TAG_SLASH:
          if (c === '>') {
            openTag(parser, true)
            closeTag(parser)
          } else {
            strictFail(parser, 'Forward-slash in opening tag not followed by >')
            parser.state = S.ATTRIB
          }
          continue

        case S.ATTRIB:
          // haven't read the attribute name yet.
          if (isWhitespace(c)) {
            continue
          } else if (c === '>') {
            openTag(parser)
          } else if (c === '/') {
            parser.state = S.OPEN_TAG_SLASH
          } else if (isMatch(nameStart, c)) {
            parser.attribName = c
            parser.attribValue = ''
            parser.state = S.ATTRIB_NAME
          } else {
            strictFail(parser, 'Invalid attribute name')
          }
          continue

        case S.ATTRIB_NAME:
          if (c === '=') {
            parser.state = S.ATTRIB_VALUE
          } else if (c === '>') {
            strictFail(parser, 'Attribute without value')
            parser.attribValue = parser.attribName
            attrib(parser)
            openTag(parser)
          } else if (isWhitespace(c)) {
            parser.state = S.ATTRIB_NAME_SAW_WHITE
          } else if (isMatch(nameBody, c)) {
            parser.attribName += c
          } else {
            strictFail(parser, 'Invalid attribute name')
          }
          continue

        case S.ATTRIB_NAME_SAW_WHITE:
          if (c === '=') {
            parser.state = S.ATTRIB_VALUE
          } else if (isWhitespace(c)) {
            continue
          } else {
            strictFail(parser, 'Attribute without value')
            parser.tag.attributes[parser.attribName] = ''
            parser.attribValue = ''
            emitNode(parser, 'onattribute', {
              name: parser.attribName,
              value: ''
            })
            parser.attribName = ''
            if (c === '>') {
              openTag(parser)
            } else if (isMatch(nameStart, c)) {
              parser.attribName = c
              parser.state = S.ATTRIB_NAME
            } else {
              strictFail(parser, 'Invalid attribute name')
              parser.state = S.ATTRIB
            }
          }
          continue

        case S.ATTRIB_VALUE:
          if (isWhitespace(c)) {
            continue
          } else if (isQuote(c)) {
            parser.q = c
            parser.state = S.ATTRIB_VALUE_QUOTED
          } else {
            strictFail(parser, 'Unquoted attribute value')
            parser.state = S.ATTRIB_VALUE_UNQUOTED
            parser.attribValue = c
          }
          continue

        case S.ATTRIB_VALUE_QUOTED:
          if (c !== parser.q) {
            if (c === '&') {
              parser.state = S.ATTRIB_VALUE_ENTITY_Q
            } else {
              parser.attribValue += c
            }
            continue
          }
          attrib(parser)
          parser.q = ''
          parser.state = S.ATTRIB_VALUE_CLOSED
          continue

        case S.ATTRIB_VALUE_CLOSED:
          if (isWhitespace(c)) {
            parser.state = S.ATTRIB
          } else if (c === '>') {
            openTag(parser)
          } else if (c === '/') {
            parser.state = S.OPEN_TAG_SLASH
          } else if (isMatch(nameStart, c)) {
            strictFail(parser, 'No whitespace between attributes')
            parser.attribName = c
            parser.attribValue = ''
            parser.state = S.ATTRIB_NAME
          } else {
            strictFail(parser, 'Invalid attribute name')
          }
          continue

        case S.ATTRIB_VALUE_UNQUOTED:
          if (!isAttribEnd(c)) {
            if (c === '&') {
              parser.state = S.ATTRIB_VALUE_ENTITY_U
            } else {
              parser.attribValue += c
            }
            continue
          }
          attrib(parser)
          if (c === '>') {
            openTag(parser)
          } else {
            parser.state = S.ATTRIB
          }
          continue

        case S.CLOSE_TAG:
          if (!parser.tagName) {
            if (isWhitespace(c)) {
              continue
            } else if (notMatch(nameStart, c)) {
              if (parser.script) {
                parser.script += '</' + c
                parser.state = S.SCRIPT
              } else {
                strictFail(parser, 'Invalid tagname in closing tag.')
              }
            } else {
              parser.tagName = c
            }
          } else if (c === '>') {
            closeTag(parser)
          } else if (isMatch(nameBody, c)) {
            parser.tagName += c
          } else if (parser.script) {
            parser.script += '</' + parser.tagName
            parser.tagName = ''
            parser.state = S.SCRIPT
          } else {
            if (!isWhitespace(c)) {
              strictFail(parser, 'Invalid tagname in closing tag')
            }
            parser.state = S.CLOSE_TAG_SAW_WHITE
          }
          continue

        case S.CLOSE_TAG_SAW_WHITE:
          if (isWhitespace(c)) {
            continue
          }
          if (c === '>') {
            closeTag(parser)
          } else {
            strictFail(parser, 'Invalid characters in closing tag')
          }
          continue

        case S.TEXT_ENTITY:
        case S.ATTRIB_VALUE_ENTITY_Q:
        case S.ATTRIB_VALUE_ENTITY_U:
          var returnState
          var buffer
          switch (parser.state) {
            case S.TEXT_ENTITY:
              returnState = S.TEXT
              buffer = 'textNode'
              break

            case S.ATTRIB_VALUE_ENTITY_Q:
              returnState = S.ATTRIB_VALUE_QUOTED
              buffer = 'attribValue'
              break

            case S.ATTRIB_VALUE_ENTITY_U:
              returnState = S.ATTRIB_VALUE_UNQUOTED
              buffer = 'attribValue'
              break
          }

          if (c === ';') {
            parser[buffer] += parseEntity(parser)
            parser.entity = ''
            parser.state = returnState
          } else if (isMatch(parser.entity.length ? entityBody : entityStart, c)) {
            parser.entity += c
          } else {
            strictFail(parser, 'Invalid character in entity name')
            parser[buffer] += '&' + parser.entity + c
            parser.entity = ''
            parser.state = returnState
          }

          continue

        default:
          throw new Error(parser, 'Unknown state: ' + parser.state)
      }
    } // while

    if (parser.position >= parser.bufferCheckPosition) {
      checkBufferLength(parser)
    }
    return parser
  }

  /*! http://mths.be/fromcodepoint v0.1.0 by @mathias */
  /* istanbul ignore next */
  if (!String.fromCodePoint) {
    (function () {
      var stringFromCharCode = String.fromCharCode
      var floor = Math.floor
      var fromCodePoint = function () {
        var MAX_SIZE = 0x4000
        var codeUnits = []
        var highSurrogate
        var lowSurrogate
        var index = -1
        var length = arguments.length
        if (!length) {
          return ''
        }
        var result = ''
        while (++index < length) {
          var codePoint = Number(arguments[index])
          if (
            !isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity`
            codePoint < 0 || // not a valid Unicode code point
            codePoint > 0x10FFFF || // not a valid Unicode code point
            floor(codePoint) !== codePoint // not an integer
          ) {
            throw RangeError('Invalid code point: ' + codePoint)
          }
          if (codePoint <= 0xFFFF) { // BMP code point
            codeUnits.push(codePoint)
          } else { // Astral code point; split in surrogate halves
            // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
            codePoint -= 0x10000
            highSurrogate = (codePoint >> 10) + 0xD800
            lowSurrogate = (codePoint % 0x400) + 0xDC00
            codeUnits.push(highSurrogate, lowSurrogate)
          }
          if (index + 1 === length || codeUnits.length > MAX_SIZE) {
            result += stringFromCharCode.apply(null, codeUnits)
            codeUnits.length = 0
          }
        }
        return result
      }
      /* istanbul ignore next */
      if (Object.defineProperty) {
        Object.defineProperty(String, 'fromCodePoint', {
          value: fromCodePoint,
          configurable: true,
          writable: true
        })
      } else {
        String.fromCodePoint = fromCodePoint
      }
    }())
  }
})( false ? 0 : exports)


/***/ }),

/***/ 566:
/***/ ((module) => {

module.exports = {

  isArray: function(value) {
    if (Array.isArray) {
      return Array.isArray(value);
    }
    // fallback for older browsers like IE 8
    return Object.prototype.toString.call( value ) === '[object Array]';
  }

};


/***/ }),

/***/ 821:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/*jslint node:true */

var xml2js = __nccwpck_require__(903);
var xml2json = __nccwpck_require__(919);
var js2xml = __nccwpck_require__(453);
var json2xml = __nccwpck_require__(900);

module.exports = {
  xml2js: xml2js,
  xml2json: xml2json,
  js2xml: js2xml,
  json2xml: json2xml
};
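
// Usage sketch for the bundled xml-js entry point above (illustrative only;
// the numeric module ids are internal to this webpack build):
//
//   var convert = __nccwpck_require__(821)
//   convert.xml2json('<a x="1">hi</a>', { compact: true, spaces: 2 })
//   convert.json2xml('{"a":{"_attributes":{"x":"1"},"_text":"hi"}}', { compact: true })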


/***/ }),

/***/ 453:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

var helper = __nccwpck_require__(717);
var isArray = (__nccwpck_require__(566).isArray);

var currentElement, currentElementName;

function validateOptions(userOptions) {
  var options = helper.copyOptions(userOptions);
  helper.ensureFlagExists('ignoreDeclaration', options);
  helper.ensureFlagExists('ignoreInstruction', options);
  helper.ensureFlagExists('ignoreAttributes', options);
  helper.ensureFlagExists('ignoreText', options);
  helper.ensureFlagExists('ignoreComment', options);
  helper.ensureFlagExists('ignoreCdata', options);
  helper.ensureFlagExists('ignoreDoctype', options);
  helper.ensureFlagExists('compact', options);
  helper.ensureFlagExists('indentText', options);
  helper.ensureFlagExists('indentCdata', options);
  helper.ensureFlagExists('indentAttributes', options);
  helper.ensureFlagExists('indentInstruction', options);
  helper.ensureFlagExists('fullTagEmptyElement', options);
  helper.ensureFlagExists('noQuotesForNativeAttributes', options);
  helper.ensureSpacesExists(options);
  if (typeof options.spaces === 'number') {
    options.spaces = Array(options.spaces + 1).join(' ');
  }
  helper.ensureKeyExists('declaration', options);
  helper.ensureKeyExists('instruction', options);
  helper.ensureKeyExists('attributes', options);
  helper.ensureKeyExists('text', options);
  helper.ensureKeyExists('comment', options);
  helper.ensureKeyExists('cdata', options);
  helper.ensureKeyExists('doctype', options);
  helper.ensureKeyExists('type', options);
  helper.ensureKeyExists('name', options);
  helper.ensureKeyExists('elements', options);
  helper.checkFnExists('doctype', options);
  helper.checkFnExists('instruction', options);
  helper.checkFnExists('cdata', options);
  helper.checkFnExists('comment', options);
  helper.checkFnExists('text', options);
  helper.checkFnExists('instructionName', options);
  helper.checkFnExists('elementName', options);
  helper.checkFnExists('attributeName', options);
  helper.checkFnExists('attributeValue', options);
  helper.checkFnExists('attributes', options);
  helper.checkFnExists('fullTagEmptyElement', options);
  return options;
}

function writeIndentation(options, depth, firstLine) {
  return (!firstLine && options.spaces ? '\n' : '') + Array(depth + 1).join(options.spaces);
}

function writeAttributes(attributes, options, depth) {
  if (options.ignoreAttributes) {
    return '';
  }
  if ('attributesFn' in options) {
    attributes = options.attributesFn(attributes, currentElementName, currentElement);
  }
  var key, attr, attrName, quote, result = [];
  for (key in attributes) {
    if (attributes.hasOwnProperty(key) && attributes[key] !== null && attributes[key] !== undefined) {
      quote = options.noQuotesForNativeAttributes && typeof attributes[key] !== 'string' ? '' : '"';
      attr = '' + attributes[key]; // ensure number and boolean are converted to String
      attr = attr.replace(/"/g, '&quot;');
      attrName = 'attributeNameFn' in options ? options.attributeNameFn(key, attr, currentElementName, currentElement) : key;
      result.push((options.spaces && options.indentAttributes? writeIndentation(options, depth+1, false) : ' '));
      result.push(attrName + '=' + quote + ('attributeValueFn' in options ? options.attributeValueFn(attr, key, currentElementName, currentElement) : attr) + quote);
    }
  }
  if (attributes && Object.keys(attributes).length && options.spaces && options.indentAttributes) {
    result.push(writeIndentation(options, depth, false));
  }
  return result.join('');
}

function writeDeclaration(declaration, options, depth) {
  currentElement = declaration;
  currentElementName = 'xml';
  return options.ignoreDeclaration ? '' : '<?' + 'xml' + writeAttributes(declaration[options.attributesKey], options, depth) + '?>';
}

function writeInstruction(instruction, options, depth) {
  if (options.ignoreInstruction) {
    return '';
  }
  var key;
  for (key in instruction) {
    if (instruction.hasOwnProperty(key)) {
      break;
    }
  }
  var instructionName = 'instructionNameFn' in options ? options.instructionNameFn(key, instruction[key], currentElementName, currentElement) : key;
  if (typeof instruction[key] === 'object') {
    currentElement = instruction;
    currentElementName = instructionName;
    return '<?' + instructionName + writeAttributes(instruction[key][options.attributesKey], options, depth) + '?>';
  } else {
    var instructionValue = instruction[key] ? instruction[key] : '';
    if ('instructionFn' in options) instructionValue = options.instructionFn(instructionValue, key, currentElementName, currentElement);
    return '<?' + instructionName + (instructionValue ? ' ' + instructionValue : '') + '?>';
  }
}

function writeComment(comment, options) {
  return options.ignoreComment ? '' : '<!--' + ('commentFn' in options ? options.commentFn(comment, currentElementName, currentElement) : comment) + '-->';
}

function writeCdata(cdata, options) {
  return options.ignoreCdata ? '' : '<![CDATA[' + ('cdataFn' in options ? options.cdataFn(cdata, currentElementName, currentElement) : cdata.replace(']]>', ']]]]><![CDATA[>')) + ']]>';
}

function writeDoctype(doctype, options) {
  return options.ignoreDoctype ? '' : '<!DOCTYPE ' + ('doctypeFn' in options ? options.doctypeFn(doctype, currentElementName, currentElement) : doctype) + '>';
}

function writeText(text, options) {
  if (options.ignoreText) return '';
  text = '' + text; // ensure Number and Boolean are converted to String
  text = text.replace(/&amp;/g, '&'); // desanitize to avoid double sanitization
  text = text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
  return 'textFn' in options ? options.textFn(text, currentElementName, currentElement) : text;
}
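
// Example of the sanitize/desanitize dance above (comment added for clarity):
// writeText('1 < 2 &amp; 3', opts) first turns '&amp;' back into '&', then
// re-escapes everything, producing '1 &lt; 2 &amp; 3' exactly once.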
|
||
|
||
function hasContent(element, options) {
|
||
var i;
|
||
if (element.elements && element.elements.length) {
|
||
for (i = 0; i < element.elements.length; ++i) {
|
||
switch (element.elements[i][options.typeKey]) {
|
||
case 'text':
|
||
if (options.indentText) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case 'cdata':
|
||
if (options.indentCdata) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case 'instruction':
|
||
if (options.indentInstruction) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case 'doctype':
|
||
case 'comment':
|
||
case 'element':
|
||
return true;
|
||
default:
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
function writeElement(element, options, depth) {
|
||
currentElement = element;
|
||
currentElementName = element.name;
|
||
var xml = [], elementName = 'elementNameFn' in options ? options.elementNameFn(element.name, element) : element.name;
|
||
xml.push('<' + elementName);
|
||
if (element[options.attributesKey]) {
|
||
xml.push(writeAttributes(element[options.attributesKey], options, depth));
|
||
}
|
||
var withClosingTag = element[options.elementsKey] && element[options.elementsKey].length || element[options.attributesKey] && element[options.attributesKey]['xml:space'] === 'preserve';
|
||
if (!withClosingTag) {
|
||
if ('fullTagEmptyElementFn' in options) {
|
||
withClosingTag = options.fullTagEmptyElementFn(element.name, element);
|
||
} else {
|
||
withClosingTag = options.fullTagEmptyElement;
|
||
}
|
||
}
|
||
if (withClosingTag) {
|
||
xml.push('>');
|
||
if (element[options.elementsKey] && element[options.elementsKey].length) {
|
||
xml.push(writeElements(element[options.elementsKey], options, depth + 1));
|
||
currentElement = element;
|
||
currentElementName = element.name;
|
||
}
|
||
xml.push(options.spaces && hasContent(element, options) ? '\n' + Array(depth + 1).join(options.spaces) : '');
|
||
xml.push('</' + elementName + '>');
|
||
} else {
|
||
xml.push('/>');
|
||
}
|
||
return xml.join('');
|
||
}
|
||
|
||
function writeElements(elements, options, depth, firstLine) {
|
||
return elements.reduce(function (xml, element) {
|
||
var indent = writeIndentation(options, depth, firstLine && !xml);
|
||
switch (element.type) {
|
||
case 'element': return xml + indent + writeElement(element, options, depth);
|
||
case 'comment': return xml + indent + writeComment(element[options.commentKey], options);
|
||
case 'doctype': return xml + indent + writeDoctype(element[options.doctypeKey], options);
|
||
case 'cdata': return xml + (options.indentCdata ? indent : '') + writeCdata(element[options.cdataKey], options);
|
||
case 'text': return xml + (options.indentText ? indent : '') + writeText(element[options.textKey], options);
|
||
case 'instruction':
|
||
var instruction = {};
|
||
instruction[element[options.nameKey]] = element[options.attributesKey] ? element : element[options.instructionKey];
|
||
return xml + (options.indentInstruction ? indent : '') + writeInstruction(instruction, options, depth);
|
||
}
|
||
}, '');
|
||
}
|
||
|
||
function hasContentCompact(element, options, anyContent) {
|
||
var key;
|
||
for (key in element) {
|
||
if (element.hasOwnProperty(key)) {
|
||
switch (key) {
|
||
case options.parentKey:
|
||
case options.attributesKey:
|
||
break; // skip to next key
|
||
case options.textKey:
|
||
if (options.indentText || anyContent) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case options.cdataKey:
|
||
if (options.indentCdata || anyContent) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case options.instructionKey:
|
||
if (options.indentInstruction || anyContent) {
|
||
return true;
|
||
}
|
||
break; // skip to next key
|
||
case options.doctypeKey:
|
||
case options.commentKey:
|
||
return true;
|
||
default:
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
function writeElementCompact(element, name, options, depth, indent) {
|
||
currentElement = element;
|
||
currentElementName = name;
|
||
var elementName = 'elementNameFn' in options ? options.elementNameFn(name, element) : name;
|
||
if (typeof element === 'undefined' || element === null || element === '') {
|
||
return 'fullTagEmptyElementFn' in options && options.fullTagEmptyElementFn(name, element) || options.fullTagEmptyElement ? '<' + elementName + '></' + elementName + '>' : '<' + elementName + '/>';
|
||
}
|
||
var xml = [];
|
||
if (name) {
|
||
xml.push('<' + elementName);
|
||
if (typeof element !== 'object') {
|
||
xml.push('>' + writeText(element,options) + '</' + elementName + '>');
|
||
return xml.join('');
|
||
}
|
||
if (element[options.attributesKey]) {
|
||
xml.push(writeAttributes(element[options.attributesKey], options, depth));
|
||
}
|
||
var withClosingTag = hasContentCompact(element, options, true) || element[options.attributesKey] && element[options.attributesKey]['xml:space'] === 'preserve';
|
||
if (!withClosingTag) {
|
||
if ('fullTagEmptyElementFn' in options) {
|
||
withClosingTag = options.fullTagEmptyElementFn(name, element);
|
||
} else {
|
||
withClosingTag = options.fullTagEmptyElement;
|
||
}
|
||
}
|
||
if (withClosingTag) {
|
||
xml.push('>');
|
||
} else {
|
||
xml.push('/>');
|
||
return xml.join('');
|
||
}
|
||
}
|
||
xml.push(writeElementsCompact(element, options, depth + 1, false));
|
||
currentElement = element;
|
||
currentElementName = name;
|
||
if (name) {
|
||
xml.push((indent ? writeIndentation(options, depth, false) : '') + '</' + elementName + '>');
|
||
}
|
||
return xml.join('');
|
||
}
|
||
|
||
function writeElementsCompact(element, options, depth, firstLine) {
|
||
var i, key, nodes, xml = [];
|
||
for (key in element) {
|
||
if (element.hasOwnProperty(key)) {
|
||
nodes = isArray(element[key]) ? element[key] : [element[key]];
|
||
for (i = 0; i < nodes.length; ++i) {
|
||
switch (key) {
|
||
case options.declarationKey: xml.push(writeDeclaration(nodes[i], options, depth)); break;
|
||
case options.instructionKey: xml.push((options.indentInstruction ? writeIndentation(options, depth, firstLine) : '') + writeInstruction(nodes[i], options, depth)); break;
|
||
case options.attributesKey: case options.parentKey: break; // skip
|
||
case options.textKey: xml.push((options.indentText ? writeIndentation(options, depth, firstLine) : '') + writeText(nodes[i], options)); break;
|
||
case options.cdataKey: xml.push((options.indentCdata ? writeIndentation(options, depth, firstLine) : '') + writeCdata(nodes[i], options)); break;
|
||
case options.doctypeKey: xml.push(writeIndentation(options, depth, firstLine) + writeDoctype(nodes[i], options)); break;
|
||
case options.commentKey: xml.push(writeIndentation(options, depth, firstLine) + writeComment(nodes[i], options)); break;
|
||
default: xml.push(writeIndentation(options, depth, firstLine) + writeElementCompact(nodes[i], key, options, depth, hasContentCompact(nodes[i], options)));
|
||
}
|
||
firstLine = firstLine && !xml.length;
|
||
}
|
||
}
|
||
}
|
||
return xml.join('');
|
||
}
|
||
|
||
module.exports = function (js, options) {
|
||
options = validateOptions(options);
|
||
var xml = [];
|
||
currentElement = js;
|
||
currentElementName = '_root_';
|
||
if (options.compact) {
|
||
xml.push(writeElementsCompact(js, options, 0, true));
|
||
} else {
|
||
if (js[options.declarationKey]) {
|
||
xml.push(writeDeclaration(js[options.declarationKey], options, 0));
|
||
}
|
||
if (js[options.elementsKey] && js[options.elementsKey].length) {
|
||
xml.push(writeElements(js[options.elementsKey], options, 0, !xml.length));
|
||
}
|
||
}
|
||
return xml.join('');
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 900:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var js2xml = __nccwpck_require__(453);
|
||
|
||
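// json2xml: accepts a Buffer or a JSON string (parsed here, with a clearer error
// when the JSON is invalid) or an already-parsed object, then delegates to js2xml.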
module.exports = function (json, options) {
|
||
if (json instanceof Buffer) {
|
||
json = json.toString();
|
||
}
|
||
var js = null;
|
||
if (typeof (json) === 'string') {
|
||
try {
|
||
js = JSON.parse(json);
|
||
} catch (e) {
|
||
throw new Error('The JSON structure is invalid');
|
||
}
|
||
} else {
|
||
js = json;
|
||
}
|
||
return js2xml(js, options);
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 717:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var isArray = (__nccwpck_require__(566).isArray);
|
||
|
||
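// Shared option helpers used by the readers and writers below: copyOptions makes a
// shallow copy, the ensure*Exists functions back-fill defaults, ensureKeyExists
// derives key names ('_text' in compact mode vs. 'text'), and checkFnExists reports
// whether a <name>Fn callback was supplied.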
module.exports = {
|
||
|
||
copyOptions: function (options) {
|
||
var key, copy = {};
|
||
for (key in options) {
|
||
if (options.hasOwnProperty(key)) {
|
||
copy[key] = options[key];
|
||
}
|
||
}
|
||
return copy;
|
||
},
|
||
|
||
ensureFlagExists: function (item, options) {
|
||
if (!(item in options) || typeof options[item] !== 'boolean') {
|
||
options[item] = false;
|
||
}
|
||
},
|
||
|
||
ensureSpacesExists: function (options) {
|
||
if (!('spaces' in options) || (typeof options.spaces !== 'number' && typeof options.spaces !== 'string')) {
|
||
options.spaces = 0;
|
||
}
|
||
},
|
||
|
||
ensureAlwaysArrayExists: function (options) {
|
||
if (!('alwaysArray' in options) || (typeof options.alwaysArray !== 'boolean' && !isArray(options.alwaysArray))) {
|
||
options.alwaysArray = false;
|
||
}
|
||
},
|
||
|
||
ensureKeyExists: function (key, options) {
|
||
if (!(key + 'Key' in options) || typeof options[key + 'Key'] !== 'string') {
|
||
options[key + 'Key'] = options.compact ? '_' + key : key;
|
||
}
|
||
},
|
||
|
||
checkFnExists: function (key, options) {
|
||
return key + 'Fn' in options;
|
||
}
|
||
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 903:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var sax = __nccwpck_require__(43);
|
||
var expat /*= require('node-expat');*/ = { on: function () { }, parse: function () { } };
|
||
var helper = __nccwpck_require__(717);
|
||
var isArray = (__nccwpck_require__(566).isArray);
|
||
|
||
var options;
|
||
var pureJsParser = true;
|
||
var currentElement;
|
||
|
||
function validateOptions(userOptions) {
|
||
options = helper.copyOptions(userOptions);
|
||
helper.ensureFlagExists('ignoreDeclaration', options);
|
||
helper.ensureFlagExists('ignoreInstruction', options);
|
||
helper.ensureFlagExists('ignoreAttributes', options);
|
||
helper.ensureFlagExists('ignoreText', options);
|
||
helper.ensureFlagExists('ignoreComment', options);
|
||
helper.ensureFlagExists('ignoreCdata', options);
|
||
helper.ensureFlagExists('ignoreDoctype', options);
|
||
helper.ensureFlagExists('compact', options);
|
||
helper.ensureFlagExists('alwaysChildren', options);
|
||
helper.ensureFlagExists('addParent', options);
|
||
helper.ensureFlagExists('trim', options);
|
||
helper.ensureFlagExists('nativeType', options);
|
||
helper.ensureFlagExists('nativeTypeAttributes', options);
|
||
helper.ensureFlagExists('sanitize', options);
|
||
helper.ensureFlagExists('instructionHasAttributes', options);
|
||
helper.ensureFlagExists('captureSpacesBetweenElements', options);
|
||
helper.ensureAlwaysArrayExists(options);
|
||
helper.ensureKeyExists('declaration', options);
|
||
helper.ensureKeyExists('instruction', options);
|
||
helper.ensureKeyExists('attributes', options);
|
||
helper.ensureKeyExists('text', options);
|
||
helper.ensureKeyExists('comment', options);
|
||
helper.ensureKeyExists('cdata', options);
|
||
helper.ensureKeyExists('doctype', options);
|
||
helper.ensureKeyExists('type', options);
|
||
helper.ensureKeyExists('name', options);
|
||
helper.ensureKeyExists('elements', options);
|
||
helper.ensureKeyExists('parent', options);
|
||
helper.checkFnExists('doctype', options);
|
||
helper.checkFnExists('instruction', options);
|
||
helper.checkFnExists('cdata', options);
|
||
helper.checkFnExists('comment', options);
|
||
helper.checkFnExists('text', options);
|
||
helper.checkFnExists('instructionName', options);
|
||
helper.checkFnExists('elementName', options);
|
||
helper.checkFnExists('attributeName', options);
|
||
helper.checkFnExists('attributeValue', options);
|
||
helper.checkFnExists('attributes', options);
|
||
return options;
|
||
}
|
||
|
||
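// Best-effort coercion used by the nativeType / nativeTypeAttributes options:
// numeric-looking strings become numbers, 'true'/'false' become booleans,
// everything else is returned unchanged.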
function nativeType(value) {
|
||
var nValue = Number(value);
|
||
if (!isNaN(nValue)) {
|
||
return nValue;
|
||
}
|
||
var bValue = value.toLowerCase();
|
||
if (bValue === 'true') {
|
||
return true;
|
||
} else if (bValue === 'false') {
|
||
return false;
|
||
}
|
||
return value;
|
||
}
|
||
|
||
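// Appends a parsed node to currentElement. In compact mode the value is stored
// under options[type + 'Key'], promoted to an array when alwaysArray applies or a
// value already exists; otherwise a { type, ... } node is pushed onto the elements
// array, with extra handling for processing instructions.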
function addField(type, value) {
|
||
var key;
|
||
if (options.compact) {
|
||
if (
|
||
!currentElement[options[type + 'Key']] &&
|
||
(isArray(options.alwaysArray) ? options.alwaysArray.indexOf(options[type + 'Key']) !== -1 : options.alwaysArray)
|
||
) {
|
||
currentElement[options[type + 'Key']] = [];
|
||
}
|
||
if (currentElement[options[type + 'Key']] && !isArray(currentElement[options[type + 'Key']])) {
|
||
currentElement[options[type + 'Key']] = [currentElement[options[type + 'Key']]];
|
||
}
|
||
if (type + 'Fn' in options && typeof value === 'string') {
|
||
value = options[type + 'Fn'](value, currentElement);
|
||
}
|
||
if (type === 'instruction' && ('instructionFn' in options || 'instructionNameFn' in options)) {
|
||
for (key in value) {
|
||
if (value.hasOwnProperty(key)) {
|
||
if ('instructionFn' in options) {
|
||
value[key] = options.instructionFn(value[key], key, currentElement);
|
||
} else {
|
||
var temp = value[key];
|
||
delete value[key];
|
||
value[options.instructionNameFn(key, temp, currentElement)] = temp;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
if (isArray(currentElement[options[type + 'Key']])) {
|
||
currentElement[options[type + 'Key']].push(value);
|
||
} else {
|
||
currentElement[options[type + 'Key']] = value;
|
||
}
|
||
} else {
|
||
if (!currentElement[options.elementsKey]) {
|
||
currentElement[options.elementsKey] = [];
|
||
}
|
||
var element = {};
|
||
element[options.typeKey] = type;
|
||
if (type === 'instruction') {
|
||
for (key in value) {
|
||
if (value.hasOwnProperty(key)) {
|
||
break;
|
||
}
|
||
}
|
||
element[options.nameKey] = 'instructionNameFn' in options ? options.instructionNameFn(key, value, currentElement) : key;
|
||
if (options.instructionHasAttributes) {
|
||
element[options.attributesKey] = value[key][options.attributesKey];
|
||
if ('instructionFn' in options) {
|
||
element[options.attributesKey] = options.instructionFn(element[options.attributesKey], key, currentElement);
|
||
}
|
||
} else {
|
||
if ('instructionFn' in options) {
|
||
value[key] = options.instructionFn(value[key], key, currentElement);
|
||
}
|
||
element[options.instructionKey] = value[key];
|
||
}
|
||
} else {
|
||
if (type + 'Fn' in options) {
|
||
value = options[type + 'Fn'](value, currentElement);
|
||
}
|
||
element[options[type + 'Key']] = value;
|
||
}
|
||
if (options.addParent) {
|
||
element[options.parentKey] = currentElement;
|
||
}
|
||
currentElement[options.elementsKey].push(element);
|
||
}
|
||
}
|
||
|
||
function manipulateAttributes(attributes) {
|
||
if ('attributesFn' in options && attributes) {
|
||
attributes = options.attributesFn(attributes, currentElement);
|
||
}
|
||
if ((options.trim || 'attributeValueFn' in options || 'attributeNameFn' in options || options.nativeTypeAttributes) && attributes) {
|
||
var key;
|
||
for (key in attributes) {
|
||
if (attributes.hasOwnProperty(key)) {
|
||
if (options.trim) attributes[key] = attributes[key].trim();
|
||
if (options.nativeTypeAttributes) {
|
||
attributes[key] = nativeType(attributes[key]);
|
||
}
|
||
if ('attributeValueFn' in options) attributes[key] = options.attributeValueFn(attributes[key], key, currentElement);
|
||
if ('attributeNameFn' in options) {
|
||
var temp = attributes[key];
|
||
delete attributes[key];
|
||
attributes[options.attributeNameFn(key, temp, currentElement)] = temp;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
return attributes;
|
||
}
|
||
|
||
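// Processing-instruction handler: the XML declaration (<?xml ...?>) is stored under
// declarationKey, every other instruction becomes an instruction node; when the
// body is attribute-shaped (declaration or instructionHasAttributes) it is first
// split with the name="value" regular expression below.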
function onInstruction(instruction) {
|
||
var attributes = {};
|
||
if (instruction.body && (instruction.name.toLowerCase() === 'xml' || options.instructionHasAttributes)) {
|
||
var attrsRegExp = /([\w:-]+)\s*=\s*(?:"([^"]*)"|'([^']*)'|(\w+))\s*/g;
|
||
var match;
|
||
while ((match = attrsRegExp.exec(instruction.body)) !== null) {
|
||
attributes[match[1]] = match[2] || match[3] || match[4];
|
||
}
|
||
attributes = manipulateAttributes(attributes);
|
||
}
|
||
if (instruction.name.toLowerCase() === 'xml') {
|
||
if (options.ignoreDeclaration) {
|
||
return;
|
||
}
|
||
currentElement[options.declarationKey] = {};
|
||
if (Object.keys(attributes).length) {
|
||
currentElement[options.declarationKey][options.attributesKey] = attributes;
|
||
}
|
||
if (options.addParent) {
|
||
currentElement[options.declarationKey][options.parentKey] = currentElement;
|
||
}
|
||
} else {
|
||
if (options.ignoreInstruction) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
instruction.body = instruction.body.trim();
|
||
}
|
||
var value = {};
|
||
if (options.instructionHasAttributes && Object.keys(attributes).length) {
|
||
value[instruction.name] = {};
|
||
value[instruction.name][options.attributesKey] = attributes;
|
||
} else {
|
||
value[instruction.name] = instruction.body;
|
||
}
|
||
addField('instruction', value);
|
||
}
|
||
}
|
||
|
||
function onStartElement(name, attributes) {
|
||
var element;
|
||
if (typeof name === 'object') {
|
||
attributes = name.attributes;
|
||
name = name.name;
|
||
}
|
||
attributes = manipulateAttributes(attributes);
|
||
if ('elementNameFn' in options) {
|
||
name = options.elementNameFn(name, currentElement);
|
||
}
|
||
if (options.compact) {
|
||
element = {};
|
||
if (!options.ignoreAttributes && attributes && Object.keys(attributes).length) {
|
||
element[options.attributesKey] = {};
|
||
var key;
|
||
for (key in attributes) {
|
||
if (attributes.hasOwnProperty(key)) {
|
||
element[options.attributesKey][key] = attributes[key];
|
||
}
|
||
}
|
||
}
|
||
if (
|
||
!(name in currentElement) &&
|
||
(isArray(options.alwaysArray) ? options.alwaysArray.indexOf(name) !== -1 : options.alwaysArray)
|
||
) {
|
||
currentElement[name] = [];
|
||
}
|
||
if (currentElement[name] && !isArray(currentElement[name])) {
|
||
currentElement[name] = [currentElement[name]];
|
||
}
|
||
if (isArray(currentElement[name])) {
|
||
currentElement[name].push(element);
|
||
} else {
|
||
currentElement[name] = element;
|
||
}
|
||
} else {
|
||
if (!currentElement[options.elementsKey]) {
|
||
currentElement[options.elementsKey] = [];
|
||
}
|
||
element = {};
|
||
element[options.typeKey] = 'element';
|
||
element[options.nameKey] = name;
|
||
if (!options.ignoreAttributes && attributes && Object.keys(attributes).length) {
|
||
element[options.attributesKey] = attributes;
|
||
}
|
||
if (options.alwaysChildren) {
|
||
element[options.elementsKey] = [];
|
||
}
|
||
currentElement[options.elementsKey].push(element);
|
||
}
|
||
element[options.parentKey] = currentElement; // will be deleted in onEndElement() if !options.addParent
|
||
currentElement = element;
|
||
}
|
||
|
||
function onText(text) {
|
||
if (options.ignoreText) {
|
||
return;
|
||
}
|
||
if (!text.trim() && !options.captureSpacesBetweenElements) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
text = text.trim();
|
||
}
|
||
if (options.nativeType) {
|
||
text = nativeType(text);
|
||
}
|
||
if (options.sanitize) {
|
||
text = text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
|
||
}
|
||
addField('text', text);
|
||
}
|
||
|
||
function onComment(comment) {
|
||
if (options.ignoreComment) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
comment = comment.trim();
|
||
}
|
||
addField('comment', comment);
|
||
}
|
||
|
||
function onEndElement(name) {
|
||
var parentElement = currentElement[options.parentKey];
|
||
if (!options.addParent) {
|
||
delete currentElement[options.parentKey];
|
||
}
|
||
currentElement = parentElement;
|
||
}
|
||
|
||
function onCdata(cdata) {
|
||
if (options.ignoreCdata) {
|
||
return;
|
||
}
|
||
if (options.trim) {
|
||
cdata = cdata.trim();
|
||
}
|
||
addField('cdata', cdata);
|
||
}
|
||
|
||
function onDoctype(doctype) {
|
||
if (options.ignoreDoctype) {
|
||
return;
|
||
}
|
||
doctype = doctype.replace(/^ /, '');
|
||
if (options.trim) {
|
||
doctype = doctype.trim();
|
||
}
|
||
addField('doctype', doctype);
|
||
}
|
||
|
||
function onError(error) {
|
||
error.note = error; //console.error(error);
|
||
}
|
||
|
||
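// xml2js entry point: wires the sax callbacks defined above (the node-expat branch
// is stubbed out and kept only for reference), feeds the whole XML string through a
// single write(), and returns the accumulated result object.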
module.exports = function (xml, userOptions) {
|
||
|
||
var parser = pureJsParser ? sax.parser(true, {}) : new expat.Parser('UTF-8');
|
||
var result = {};
|
||
currentElement = result;
|
||
|
||
options = validateOptions(userOptions);
|
||
|
||
if (pureJsParser) {
|
||
parser.opt = {strictEntities: true};
|
||
parser.onopentag = onStartElement;
|
||
parser.ontext = onText;
|
||
parser.oncomment = onComment;
|
||
parser.onclosetag = onEndElement;
|
||
parser.onerror = onError;
|
||
parser.oncdata = onCdata;
|
||
parser.ondoctype = onDoctype;
|
||
parser.onprocessinginstruction = onInstruction;
|
||
} else {
|
||
parser.on('startElement', onStartElement);
|
||
parser.on('text', onText);
|
||
parser.on('comment', onComment);
|
||
parser.on('endElement', onEndElement);
|
||
parser.on('error', onError);
|
||
//parser.on('startCdata', onStartCdata);
|
||
//parser.on('endCdata', onEndCdata);
|
||
//parser.on('entityDecl', onEntityDecl);
|
||
}
|
||
|
||
if (pureJsParser) {
|
||
parser.write(xml).close();
|
||
} else {
|
||
if (!parser.parse(xml)) {
|
||
throw new Error('XML parsing error: ' + parser.getError());
|
||
}
|
||
}
|
||
|
||
if (result[options.elementsKey]) {
|
||
var temp = result[options.elementsKey];
|
||
delete result[options.elementsKey];
|
||
result[options.elementsKey] = temp;
|
||
delete result.text;
|
||
}
|
||
|
||
return result;
|
||
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 919:
|
||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||
|
||
var helper = __nccwpck_require__(717);
|
||
var xml2js = __nccwpck_require__(903);
|
||
|
||
function validateOptions (userOptions) {
|
||
var options = helper.copyOptions(userOptions);
|
||
helper.ensureSpacesExists(options);
|
||
return options;
|
||
}
|
||
|
||
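// xml2json: runs xml2js and stringifies the result. When addParent is set, parent
// references are replaced with '_' so JSON.stringify does not hit a circular
// structure; U+2028/U+2029 are escaped so the output stays valid inside scripts.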
module.exports = function(xml, userOptions) {
|
||
var options, js, json, parentKey;
|
||
options = validateOptions(userOptions);
|
||
js = xml2js(xml, options);
|
||
parentKey = 'compact' in options && options.compact ? '_parent' : 'parent';
|
||
// parentKey = options.compact ? '_parent' : 'parent'; // consider this
|
||
if ('addParent' in options && options.addParent) {
|
||
json = JSON.stringify(js, function (k, v) { return k === parentKey? '_' : v; }, options.spaces);
|
||
} else {
|
||
json = JSON.stringify(js, null, options.spaces);
|
||
}
|
||
return json.replace(/\u2028/g, '\\u2028').replace(/\u2029/g, '\\u2029');
|
||
};
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 396:
|
||
/***/ ((module) => {
|
||
|
||
module.exports = eval("require")("@actions/core");
|
||
|
||
|
||
/***/ }),
|
||
|
||
/***/ 781:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
module.exports = require("stream");
|
||
|
||
/***/ }),
|
||
|
||
/***/ 576:
|
||
/***/ ((module) => {
|
||
|
||
"use strict";
|
||
module.exports = require("string_decoder");
|
||
|
||
/***/ })
|
||
|
||
/******/ });
|
||
/************************************************************************/
|
||
/******/ // The module cache
|
||
/******/ var __webpack_module_cache__ = {};
|
||
/******/
|
||
/******/ // The require function
|
||
/******/ function __nccwpck_require__(moduleId) {
|
||
/******/ // Check if module is in cache
|
||
/******/ var cachedModule = __webpack_module_cache__[moduleId];
|
||
/******/ if (cachedModule !== undefined) {
|
||
/******/ return cachedModule.exports;
|
||
/******/ }
|
||
/******/ // Create a new module (and put it into the cache)
|
||
/******/ var module = __webpack_module_cache__[moduleId] = {
|
||
/******/ // no module.id needed
|
||
/******/ // no module.loaded needed
|
||
/******/ exports: {}
|
||
/******/ };
|
||
/******/
|
||
/******/ // Execute the module function
|
||
/******/ var threw = true;
|
||
/******/ try {
|
||
/******/ __webpack_modules__[moduleId](module, module.exports, __nccwpck_require__);
|
||
/******/ threw = false;
|
||
/******/ } finally {
|
||
/******/ if(threw) delete __webpack_module_cache__[moduleId];
|
||
/******/ }
|
||
/******/
|
||
/******/ // Return the exports of the module
|
||
/******/ return module.exports;
|
||
/******/ }
|
||
/******/
|
||
/************************************************************************/
|
||
/******/ /* webpack/runtime/compat get default export */
|
||
/******/ (() => {
|
||
/******/ // getDefaultExport function for compatibility with non-harmony modules
|
||
/******/ __nccwpck_require__.n = (module) => {
|
||
/******/ var getter = module && module.__esModule ?
|
||
/******/ () => (module['default']) :
|
||
/******/ () => (module);
|
||
/******/ __nccwpck_require__.d(getter, { a: getter });
|
||
/******/ return getter;
|
||
/******/ };
|
||
/******/ })();
|
||
/******/
|
||
/******/ /* webpack/runtime/define property getters */
|
||
/******/ (() => {
|
||
/******/ // define getter functions for harmony exports
|
||
/******/ __nccwpck_require__.d = (exports, definition) => {
|
||
/******/ for(var key in definition) {
|
||
/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) {
|
||
/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
|
||
/******/ }
|
||
/******/ }
|
||
/******/ };
|
||
/******/ })();
|
||
/******/
|
||
/******/ /* webpack/runtime/hasOwnProperty shorthand */
|
||
/******/ (() => {
|
||
/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
|
||
/******/ })();
|
||
/******/
|
||
/******/ /* webpack/runtime/make namespace object */
|
||
/******/ (() => {
|
||
/******/ // define __esModule on exports
|
||
/******/ __nccwpck_require__.r = (exports) => {
|
||
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
|
||
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
|
||
/******/ }
|
||
/******/ Object.defineProperty(exports, '__esModule', { value: true });
|
||
/******/ };
|
||
/******/ })();
|
||
/******/
|
||
/******/ /* webpack/runtime/compat */
|
||
/******/
|
||
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
|
||
/******/
|
||
/************************************************************************/
|
||
var __webpack_exports__ = {};
|
||
// This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
|
||
(() => {
|
||
"use strict";
|
||
// ESM COMPAT FLAG
|
||
__nccwpck_require__.r(__webpack_exports__);
|
||
|
||
;// CONCATENATED MODULE: external "fs"
|
||
const external_fs_namespaceObject = require("fs");
|
||
;// CONCATENATED MODULE: ./src/lib/sequence.js
|
||
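// Runs an array of promise-returning thunks strictly one after another and collects
// their results; unlike Promise.all it never starts two of them concurrently.
// Illustrative use (hypothetical thunks): await sequence([() => stepA(), () => stepB()])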
/* harmony default export */ const sequence = (async (promises) => {
|
||
let results = [];
|
||
for (let promise of promises) {
|
||
results = [
|
||
...results,
|
||
await promise(),
|
||
]
|
||
}
|
||
return results;
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/root-readme.js
|
||
|
||
|
||
/* harmony default export */ const root_readme = (async (root) => {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/README.md`,
|
||
`\n## recipes\n\n`
|
||
)
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/README.md`,
|
||
`* [procedures](procedures)\n`
|
||
)
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/README.md`,
|
||
`* [unit procedures](unit-procedures)\n`
|
||
)
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/README.md`,
|
||
`* [operations](operations)\n`
|
||
)
|
||
});
|
||
;// CONCATENATED MODULE: external "path"
|
||
const external_path_namespaceObject = require("path");
|
||
var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_namespaceObject);
|
||
;// CONCATENATED MODULE: ./src/lib/files-2.js
|
||
|
||
|
||
/* harmony default export */ const files_2 = (async (path) => (await external_fs_namespaceObject.promises.readdir(path, { withFileTypes: true }))
|
||
.filter(dirent => !dirent.isDirectory())
|
||
.map(dirent => dirent.name)
|
||
.sort(
|
||
(a, b) => a.localeCompare(b),
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/recipe-files.js
|
||
|
||
|
||
|
||
|
||
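// Reads every .oxml / .uxml / .pxml file in the recipes directory (the extension
// check is case-insensitive) and resolves to an array of { name, xml } pairs.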
/* harmony default export */ const recipe_files = ((recipes) => files_2(
|
||
recipes,
|
||
)
|
||
.then(
|
||
(names) => names
|
||
.filter(
|
||
(name) => external_path_default().extname(name).toUpperCase() === '.OXML'
|
||
|| external_path_default().extname(name).toUpperCase() === '.UXML'
|
||
|| external_path_default().extname(name).toUpperCase() === '.PXML',
|
||
)
|
||
)
|
||
.then(
|
||
(names) => Promise.all(
|
||
names
|
||
.map(
|
||
(name) => external_fs_namespaceObject.promises.readFile(
|
||
`${recipes}/${name}`,
|
||
'utf8',
|
||
).then(
|
||
(xml) => ({
|
||
name,
|
||
xml,
|
||
})
|
||
)
|
||
)
|
||
)
|
||
));
|
||
|
||
// EXTERNAL MODULE: ./node_modules/xml-js/lib/index.js
|
||
var lib = __nccwpck_require__(821);
|
||
var lib_default = /*#__PURE__*/__nccwpck_require__.n(lib);
|
||
;// CONCATENATED MODULE: ./src/lib/text.js
|
||
/* harmony default export */ const lib_text = (({ elements }) =>
|
||
elements
|
||
? elements
|
||
.find(
|
||
({
|
||
type
|
||
}) =>
|
||
type === 'text'
|
||
)?.text
|
||
: '');
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/elements.js
|
||
/* harmony default export */ const lib_elements = (({ elements }, elementName) =>
|
||
elements
|
||
.filter(
|
||
({ type, name }) =>
|
||
type === 'element'
|
||
&& name === elementName
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/make-units-recipes.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
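// Parses each recipe file with xml-js, pulls Resource and ClassBased out of the
// RecipeElement Header, then groups the files first by "resource,classBased" and,
// within each group, by upper-cased file extension.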
/* harmony default export */ const make_units_recipes = ((files) => {
|
||
const recipes = files
|
||
.map(
|
||
({
|
||
name,
|
||
xml,
|
||
}) => ({
|
||
name,
|
||
recipeElement: lib_elements(
|
||
JSON.parse(
|
||
lib_default().xml2json(xml, { compact: false, spaces: 2 })
|
||
),
|
||
'RecipeElement',
|
||
)[0],
|
||
})
|
||
)
|
||
.map(
|
||
({
|
||
name,
|
||
recipeElement,
|
||
}) => ({
|
||
name,
|
||
recipeElement,
|
||
resource: lib_text(
|
||
lib_elements(
|
||
lib_elements(
|
||
recipeElement,
|
||
'Header',
|
||
)[0],
|
||
'Resource',
|
||
)[0],
|
||
),
|
||
classBased: lib_text(
|
||
lib_elements(
|
||
lib_elements(
|
||
recipeElement,
|
||
'Header',
|
||
)[0],
|
||
'ClassBased',
|
||
)[0],
|
||
)
|
||
})
|
||
)
|
||
.reduce(
|
||
(acc, item) => ({
|
||
...acc,
|
||
[`${item.resource},${item.classBased}`]: [
|
||
item,
|
||
...acc[`${item.resource},${item.classBased}`] || [],
|
||
]
|
||
}),
|
||
{},
|
||
);
|
||
|
||
return Object.keys(
|
||
recipes,
|
||
).reduce(
|
||
(acc, key) => ({
|
||
...acc,
|
||
[key]: recipes[key]
|
||
.reduce(
|
||
(acc2, item) => ({
|
||
...acc2,
|
||
[external_path_default().extname(item.name).toUpperCase()]: [
|
||
...acc2[external_path_default().extname(item.name).toUpperCase()] || [],
|
||
item,
|
||
]
|
||
}),
|
||
{},
|
||
)
|
||
}),
|
||
{},
|
||
);
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/steps-data.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
const formulaValue = (fv) => {
  return {
    name: lib_text(lib_elements(fv, 'Name')[0]),
    type: lib_elements(fv, 'Real')[0]
      ? 'Real'
      : lib_elements(fv, 'Integer')[0]
        ? 'Integer'
        : lib_elements(fv, 'String')[0]
          ? 'String'
          : lib_elements(fv, 'EnumerationSet')[0]
            ? 'enumset'
            : 'ERROR',
    value: lib_elements(fv, 'Value')[0]
      ? lib_elements(fv, 'Real')[0]
        ? lib_text(lib_elements(fv, 'Real')[0])
        : lib_elements(fv, 'Integer')[0]
          ? lib_text(lib_elements(fv, 'Integer')[0])
          : lib_elements(fv, 'String')[0]
            ? lib_text(lib_elements(fv, 'String')[0])
            : lib_elements(fv, 'EnumerationSet')[0]
              ? lib_text(lib_elements(fv, 'EnumerationMember')[0])
              : 'ERROR'
      : '',
    defer: lib_elements(fv, 'Defer')[0]
      ? lib_text(lib_elements(fv, 'Defer')[0])
      : '',
    eu: lib_elements(fv, 'EnumerationSet')[0]
      ? lib_text(lib_elements(fv, 'EnumerationSet')[0])
      : lib_elements(fv, 'EngineeringUnits')[0]
        ? lib_text(lib_elements(fv, 'EngineeringUnits')[0])
        : ''
  }
}
|
||
|
||
/* harmony default export */ const steps_data = (({
|
||
recipeElement,
|
||
}) => {
|
||
return lib_elements(recipeElement, 'Steps')
|
||
.map(
|
||
(steps) => lib_elements(steps, 'Step')
|
||
.map(
|
||
(step) => lib_text(lib_elements(step, 'StepRecipeID')[0])
|
||
)
|
||
)
|
||
});
|
||
;// CONCATENATED MODULE: ./src/lib/base-name.js
|
||
/* harmony default export */ const base_name = ((name) => name.replace(/\.[^/.]+$/, ""));
|
||
;// CONCATENATED MODULE: ./src/recipe/recipe-data.js
|
||
|
||
|
||
|
||
/* harmony default export */ const recipe_data = (({
|
||
recipeElement,
|
||
root,
|
||
name,
|
||
type,
|
||
resource,
|
||
}) => {
|
||
const fileName = `${root}/${type}/${base_name(name)}.md`;
|
||
return {
|
||
recipeElement,
|
||
root,
|
||
resource,
|
||
fileName,
|
||
name,
|
||
type,
|
||
steps: steps_data({
|
||
recipeElement,
|
||
}),
|
||
};
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/recipes-data.js
|
||
|
||
|
||
|
||
/* harmony default export */ const recipes_data = (({
|
||
recipes,
|
||
extension,
|
||
root,
|
||
}) => recipes
|
||
.sort(
|
||
({
|
||
name,
|
||
},
|
||
{
|
||
name: other,
|
||
}) => name.localeCompare(other),
|
||
)
|
||
.map(
|
||
({
|
||
name,
|
||
recipeElement,
|
||
resource,
|
||
}) => {
|
||
if (extension === '.OXML') {
|
||
return recipe_data({
|
||
recipeElement,
|
||
resource,
|
||
root,
|
||
name,
|
||
type: 'operations',
|
||
})
|
||
}
|
||
|
||
if (extension === '.UXML') {
|
||
return recipe_data({
|
||
recipeElement,
|
||
resource,
|
||
root,
|
||
name,
|
||
type: 'unit-procedures',
|
||
})
|
||
}
|
||
if (extension === '.PXML') {
|
||
return recipe_data({
|
||
recipeElement,
|
||
resource,
|
||
root,
|
||
name,
|
||
type: 'procedures',
|
||
})
|
||
}
|
||
}
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/link-recipes.js
|
||
|
||
|
||
|
||
/* harmony default export */ const link_recipes = (async ({
|
||
root,
|
||
recipes,
|
||
}) => {
|
||
|
||
const flattened = recipes
|
||
.flatMap((steps) => steps)
|
||
.flatMap((steps) => steps)
|
||
.flatMap(({ recipes: r }) => r)
|
||
.map(
|
||
({
|
||
steps,
|
||
...rest
|
||
}) => ({
|
||
...rest,
|
||
steps: steps.flatMap(steps => steps),
|
||
})
|
||
|
||
);
|
||
const recipePhases = flattened
|
||
.filter(({ type }) => type === 'operations')
|
||
.flatMap(({ steps }) => steps)
|
||
.filter(
|
||
(value, index, array) => array
|
||
.indexOf(value) === index,
|
||
)
|
||
const operations = flattened
|
||
.filter(({ type }) => type === 'unit-procedures')
|
||
.flatMap(({ steps }) => steps)
|
||
.filter(
|
||
(value, index, array) => array
|
||
.indexOf(value) === index,
|
||
)
|
||
const unitProcedures = flattened
|
||
.filter(({ type }) => type === 'procedures')
|
||
.flatMap(({ steps }) => steps)
|
||
.filter(
|
||
(value, index, array) => array
|
||
.indexOf(value) === index,
|
||
)
|
||
for (const o of recipePhases
|
||
.map(
|
||
(item) => ({
|
||
item,
|
||
refs: flattened
|
||
.filter(({ type }) => type === 'operations')
|
||
.filter(
|
||
({ steps }) => steps.indexOf(item) >= 0,
|
||
)
|
||
.map(
|
||
({
|
||
fileName,
|
||
name,
|
||
}) => name,
|
||
)
|
||
})
|
||
)) {
|
||
if (o.refs.length) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/recipe-phases/${o.item}.md`,
|
||
`\n## operations\n\n`
|
||
)
|
||
for (const ref of o.refs) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/recipe-phases/${o.item}.md`,
|
||
`* [${base_name(ref)}](../operations/${base_name(ref)}.md)\n`
|
||
)
|
||
}
|
||
}
|
||
}
|
||
|
||
|
||
for (const o of operations
|
||
.map(
|
||
(item) => ({
|
||
item,
|
||
refs: flattened
|
||
.filter(({ type }) => type === 'unit-procedures')
|
||
.filter(
|
||
({ steps }) => steps.indexOf(item) >= 0,
|
||
)
|
||
.map(
|
||
({
|
||
fileName,
|
||
name,
|
||
}) => name,
|
||
)
|
||
})
|
||
)) {
|
||
if (o.refs.length) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/operations/${o.item}.md`,
|
||
`\n## unit-procedures\n\n`
|
||
)
|
||
for (const ref of o.refs) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/operations/${o.item}.md`,
|
||
`* [${base_name(ref)}](../unit-procedures/${base_name(ref)}.md)\n`
|
||
)
|
||
}
|
||
}
|
||
}
|
||
for (const o of unitProcedures
|
||
.map(
|
||
(item) => ({
|
||
item,
|
||
refs: flattened
|
||
.filter(({ type }) => type === 'procedures')
|
||
.filter(
|
||
({ steps }) => steps.indexOf(item) >= 0,
|
||
)
|
||
.map(
|
||
({
|
||
name,
|
||
}) => name,
|
||
)
|
||
})
|
||
)) {
|
||
if (o.refs.length) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/unit-procedures/${o.item}.md`,
|
||
`\n## procedures\n\n`
|
||
)
|
||
for (const ref of o.refs) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/unit-procedures/${o.item}.md`,
|
||
`* [${base_name(ref)}](../procedures/${base_name(ref)}.md)\n`
|
||
)
|
||
}
|
||
}
|
||
}
|
||
});
|
||
;// CONCATENATED MODULE: ./src/lib/get-attributes.js
|
||
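// Recursively flattens an xml-js element list into a plain object: every child
// element that has children maps to the text of its first text node, falling back
// to a nested object built from its own children.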
const getAttributes = (elements) => {
|
||
const ret =
|
||
elements
|
||
.reduce(
|
||
((current, { type, name, elements }) => ({
|
||
...current,
|
||
...type === 'element' && elements && {
|
||
[name]: elements
|
||
.find(
|
||
({
|
||
type,
|
||
}) =>
|
||
type === 'text'
|
||
)?.text || getAttributes(elements),
|
||
}
|
||
})
|
||
),
|
||
{},
|
||
)
|
||
return ret
|
||
}
|
||
|
||
/* harmony default export */ const get_attributes = (getAttributes);
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/write-attributes.js
|
||
|
||
|
||
|
||
/* harmony default export */ const write_attributes = (async ({ elements }, path) => {
|
||
const attributes = get_attributes(elements)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
path,
|
||
`| ${'property'.padEnd(40)} | ${'value'.padEnd(20)} |\n`
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
path,
|
||
`|${'-'.repeat(42)}|${'-'.repeat(22)}|\n`
|
||
)
|
||
for (const property of Object.getOwnPropertyNames(attributes)) {
|
||
if (typeof attributes[property] === 'string') {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
path,
|
||
`| ${property.padEnd(40) } | ${attributes[property].padEnd(20)} |\n`
|
||
)
|
||
}
|
||
}
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
path,
|
||
`\n`
|
||
)
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/exists.js
|
||
|
||
|
||
/* harmony default export */ const exists = (async (path) => {
|
||
try {
|
||
await external_fs_namespaceObject.promises.access(path)
|
||
} catch (ex) {
|
||
return false
|
||
}
|
||
return true
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/write-if-new.js
|
||
|
||
|
||
|
||
/* harmony default export */ const write_if_new = (async (fileName, content) => {
|
||
if (!(await exists(fileName))) {
|
||
return external_fs_namespaceObject.promises.writeFile(
|
||
fileName,
|
||
content
|
||
)
|
||
}
|
||
});
|
||
|
||
// EXTERNAL MODULE: ./node_modules/markdown-escape/index.js
|
||
var markdown_escape = __nccwpck_require__(913);
|
||
var markdown_escape_default = /*#__PURE__*/__nccwpck_require__.n(markdown_escape);
|
||
;// CONCATENATED MODULE: ./src/lib/escape-text.js
|
||
|
||
|
||
|
||
/* harmony default export */ const escape_text = (p =>
|
||
markdown_escape_default()(lib_text(p)));
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/parameters.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const parameters = (async ({
|
||
type,
|
||
root,
|
||
recipeElement,
|
||
name,
|
||
resource,
|
||
}) => {
|
||
const base = base_name(name);
|
||
const fileName = `${root}/${type}/${base}.md`
|
||
|
||
await write_if_new(
|
||
fileName,
|
||
`# ${base}\n\n`,
|
||
)
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`\n**${type.slice(0, -1)}**\n\n`,
|
||
)
|
||
|
||
|
||
await write_attributes(
|
||
lib_elements(
|
||
recipeElement,
|
||
'Header',
|
||
)[0],
|
||
fileName,
|
||
)
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`
|
||
## parameters
|
||
|
||
| Name | PLCReference | Type | Default | High | Low | EU | Scale |
|
||
| -------------------------------------- | ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | ------------------ |`
|
||
)
|
||
const parameters = lib_elements(
|
||
recipeElement,
|
||
'Parameter',
|
||
)
|
||
for (const p of parameters) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`
|
||
|${escape_text(lib_elements(p, 'Name')[0]).padEnd(40)}|${lib_text(lib_elements(p, 'PLCReference')[0]).padEnd(20)}|`,
|
||
)
|
||
if (lib_elements(p, 'Real')[0]) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${'Real'.padEnd(20)}|${lib_text(lib_elements(p, 'Real')[0]).padEnd(20)}|${lib_text(lib_elements(p, 'High')[0]).padEnd(20)}|${lib_text(lib_elements(p, 'Low')[0]).padEnd(20)}|${lib_text(lib_elements(p, 'EngineeringUnits')[0]).padEnd(20)}|`,
|
||
)
|
||
}
|
||
else if (lib_elements(p, 'Integer')[0]) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${'Integer'.padEnd(20)}|${lib_text(lib_elements(p, 'Integer')[0]).padEnd(20)}|${lib_text(lib_elements(p, 'High')[0]).padEnd(20)}|${lib_text(lib_elements(p, 'Low')[0]).padEnd(20)}|${lib_text(lib_elements(p, 'EngineeringUnits')[0]).padEnd(20)}|`,
|
||
)
|
||
}
|
||
else if (lib_elements(p, 'EnumerationSet')[0]) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${'enumset'.padEnd(20)}|${escape_text(lib_elements(p, 'EnumerationMember')[0]).padEnd(20)}|${''.padEnd(20)}|${''.padEnd(20)}|${escape_text(lib_elements(p, 'EnumerationSet')[0]).padEnd(20)}|`,
|
||
)
|
||
}
|
||
if (lib_elements(p, 'Scale')[0]) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${escape_text(lib_elements(p, 'Scale')[0]).padEnd(20)}|`,
|
||
)
|
||
} else {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${''.padEnd(20)}|`,
|
||
)
|
||
|
||
}
|
||
}
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`
|
||
|
||
## reports
|
||
|
||
| Name | Type | EU |
|
||
| -------------------------------------- | ------------------ | ------------------ |`
|
||
)
|
||
|
||
const reports = lib_elements(
|
||
recipeElement,
|
||
'Report',
|
||
)
|
||
for (const r of reports) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`
|
||
|${escape_text(lib_elements(r, 'Name')[0]).padEnd(40)}|`,
|
||
)
|
||
if (lib_elements(r, 'Real')[0]) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${'Real'.padEnd(20)}|${lib_text(lib_elements(r, 'EngineeringUnits')[0]).padEnd(20)}|`,
|
||
)
|
||
}
|
||
else if (lib_elements(r, 'Integer')[0]) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${'Integer'.padEnd(20)}|${lib_text(lib_elements(r, 'EngineeringUnits')[0]).padEnd(20)}|`,
|
||
)
|
||
} else {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`${'itsenum'.padEnd(20)}|${lib_text(lib_elements(r, 'EngineeringUnits')[0]).padEnd(20)}|`,
|
||
)
|
||
}
|
||
|
||
}
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`\n`,
|
||
)
|
||
return fileName
|
||
});
|
||
;// CONCATENATED MODULE: ./src/recipe/steps.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
const steps_formulaValue = (fv) => {
|
||
return {
|
||
name: lib_text(lib_elements(fv, 'Name')[0]),
|
||
type: lib_elements(fv, 'Real')[0]
|
||
? 'Real'
|
||
: lib_elements(fv, 'Integer')[0]
|
||
? 'Integer'
|
||
: lib_elements(fv, 'String')[0]
|
||
? 'String'
|
||
: lib_elements(fv, 'EnumerationSet')[0]
|
||
? 'enumset'
|
||
: 'ERROR',
|
||
value: lib_elements(fv, 'Value')[0]
|
||
? lib_elements(fv, 'Real')[0]
|
||
? lib_text(lib_elements(fv, 'Real')[0])
|
||
: lib_elements(fv, 'Integer')[0]
|
||
? lib_text(lib_elements(fv, 'Integer')[0])
|
||
: lib_elements(fv, 'String')[0]
|
||
? lib_text(lib_elements(fv, 'String')[0])
|
||
: lib_elements(fv, 'EnumerationSet')[0]
|
||
? lib_text(lib_elements(fv, 'EnumerationMember')[0])
|
||
: 'ERROR'
|
||
: '',
|
||
defer: lib_elements(fv, 'Defer')[0]
|
||
? lib_text(lib_elements(fv, 'Defer')[0])
|
||
: '',
|
||
eu: lib_elements(fv, 'EnumerationSet')[0]
|
||
? lib_text(lib_elements(fv, 'EnumerationSet')[0])
|
||
: lib_elements(fv, 'EngineeringUnits')[0]
|
||
? lib_text(lib_elements(fv, 'EngineeringUnits')[0])
|
||
: ''
|
||
}
|
||
}
|
||
|
||
/* harmony default export */ const steps = (async ({
|
||
fileName,
|
||
recipeElement,
|
||
type,
|
||
}) => {
|
||
for (const steps of lib_elements(recipeElement, 'Steps')) {
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, '\n## steps\n\n')
|
||
for (const step of lib_elements(steps, 'Step')) {
|
||
const stepRecipeId = lib_text(lib_elements(step, 'StepRecipeID')[0])
|
||
if (stepRecipeId !== '$NULL') {
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, `### ${lib_text(lib_elements(step, 'Name')[0])}\n\n`)
|
||
if (type === 'operations') {
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, `[${stepRecipeId}](../recipe-phases/${stepRecipeId}.md)\n`)
|
||
}
|
||
if (type === 'unit-procedures') {
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, `[${stepRecipeId}](../operations/${stepRecipeId}.md)\n`)
|
||
}
|
||
if (type === 'procedures') {
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, `[${stepRecipeId}](../unit-procedures/${stepRecipeId}.md)\n`)
|
||
}
|
||
|
||
|
||
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, `
|
||
#### formula values
|
||
|
||
| name | type | value | defer | EU |
|
||
| ---------------------------- | ------------------ | ------------------ | ------------------ | ------------------ |
|
||
`,
|
||
)
|
||
for (const fv of lib_elements(step, 'FormulaValue').map(steps_formulaValue)) {
|
||
await external_fs_namespaceObject.promises.appendFile(fileName, `|${fv.name.padEnd(30)}|${fv.type.padEnd(20)}|${fv.value.padEnd(20)}|${fv.defer.padEnd(20)}|${fv.eu.padEnd(20)}|\n`)
|
||
|
||
}
|
||
}
|
||
}
|
||
}
|
||
return lib_elements(recipeElement, 'Steps')
|
||
.map(
|
||
(steps) => lib_elements(steps, 'Step')
|
||
.map(
|
||
(step) => lib_text(lib_elements(step, 'StepRecipeID')[0])
|
||
)
|
||
)
|
||
});
|
||
;// CONCATENATED MODULE: ./src/recipe/write/write-recipe.js
|
||
|
||
|
||
|
||
/* harmony default export */ const write_recipe = (async ({
|
||
recipeElement,
|
||
root,
|
||
name,
|
||
type,
|
||
resource,
|
||
}) => {
|
||
const fileName = await parameters({
|
||
root,
|
||
type,
|
||
recipeElement,
|
||
name,
|
||
resource,
|
||
})
|
||
|
||
return {
|
||
fileName,
|
||
name,
|
||
type,
|
||
steps: await steps({
|
||
fileName,
|
||
recipeElement,
|
||
type,
|
||
resource,
|
||
}),
|
||
};
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/mkdir-if-new.js
|
||
|
||
|
||
|
||
/* harmony default export */ const mkdir_if_new = (async (path) => external_fs_namespaceObject.promises.mkdir(
|
||
path,
|
||
{ recursive: true }
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/read-me.js
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const read_me = (async ({
|
||
title,
|
||
root,
|
||
}) => {
|
||
await mkdir_if_new(root)
|
||
|
||
const path = `${root}/README.md`
|
||
|
||
await write_if_new(
|
||
path,
|
||
`# ${title}\n\n`
|
||
)
|
||
return path
|
||
});
|
||
;// CONCATENATED MODULE: ./src/recipe/write/write-recipes.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const write_recipes = (async ({
|
||
recipes,
|
||
extension,
|
||
root,
|
||
uPath,
|
||
unit,
|
||
}) => sequence(
|
||
recipes
|
||
.sort(
|
||
({
|
||
name,
|
||
},
|
||
{
|
||
name: other,
|
||
}) => name.localeCompare(other),
|
||
)
|
||
.map(
|
||
({
|
||
name,
|
||
recipeElement,
|
||
resource,
|
||
}) => async () => {
|
||
const base = external_path_default().parse(name).name
|
||
if (extension === '.OXML') {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/${uPath}/${unit}.md`,
|
||
`* [${base}](../operations/${base}.md)\n`,
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/operations/README.md`,
|
||
` * [${base}](./${base}.md)\n`,
|
||
)
|
||
return write_recipe({
|
||
recipeElement,
|
||
resource,
|
||
root,
|
||
name,
|
||
type: 'operations',
|
||
})
|
||
}
|
||
|
||
if (extension === '.UXML') {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/${uPath}/${unit}.md`,
|
||
`* [${base}](../unit-procedures/${base}.md)\n`,
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/unit-procedures/README.md`,
|
||
` * [${base}](./${base}.md)\n`,
|
||
)
|
||
return write_recipe({
|
||
recipeElement,
|
||
resource,
|
||
root,
|
||
name,
|
||
type: 'unit-procedures',
|
||
})
|
||
}
|
||
if (extension === '.PXML') {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
await read_me({
|
||
title: 'procedures',
|
||
root: `${root}/procedures`,
|
||
// purpose: base,
|
||
}),
|
||
`* [${base}](../${base}.md)\n`,
|
||
)
|
||
return write_recipe({
|
||
recipeElement,
|
||
resource,
|
||
root,
|
||
name,
|
||
type: 'procedures',
|
||
})
|
||
}
|
||
}
|
||
)
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/write/links.js
|
||
|
||
|
||
|
||
/* harmony default export */ const links = (async ({
|
||
root,
|
||
extension,
|
||
unit,
|
||
uPath,
|
||
}) => {
|
||
if (unit !== '') {
|
||
await read_me({
|
||
title: 'units',
|
||
root: `${root}/units`,
|
||
})
|
||
}
|
||
if (extension === '.OXML') {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
await read_me({
|
||
title: 'operations',
|
||
root: `${root}/operations`,
|
||
// purpose: base,
|
||
}),
|
||
`* [${unit}](../${uPath}/${unit}.md)\n`,
|
||
)
|
||
}
|
||
if (extension === '.UXML') {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
await read_me({
|
||
title: 'unit procedures',
|
||
root: `${root}/unit-procedures`,
|
||
// purpose: base,
|
||
}),
|
||
`* [${unit}](../${uPath}/${unit}.md)\n`,
|
||
)
|
||
}
|
||
});
|
||
;// CONCATENATED MODULE: ./src/recipe/write/index.js
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const write = (async ({ recipes }) => sequence(
|
||
recipes
|
||
.flatMap((steps) => steps)
|
||
.flatMap((steps) => steps)
|
||
.map(
|
||
({
|
||
root,
|
||
extension,
|
||
unit,
|
||
uPath,
|
||
recipes: r,
|
||
}) => async () => {
|
||
await links({
|
||
root,
|
||
extension,
|
||
unit,
|
||
uPath,
|
||
});
|
||
await write_recipes({
|
||
recipes: r,
|
||
root,
|
||
extension,
|
||
uPath,
|
||
unit,
|
||
})
|
||
|
||
}
|
||
)
|
||
));
|
||
;// CONCATENATED MODULE: ./src/lib/texts.js
|
||
|
||
|
||
/* harmony default export */ const texts = ((element, property) =>
|
||
element.elements
|
||
? element
|
||
.elements
|
||
.filter(
|
||
({
|
||
type,
|
||
name,
|
||
}) =>
|
||
type === 'element'
|
||
&& name === property
|
||
).map(lib_text)
|
||
: []);
|
||
|
||
;// CONCATENATED MODULE: ./src/recipe/link-equipment-modules/unit-link.js
|
||
|
||
|
||
|
||
/* harmony default export */ const unit_link = (async ({
|
||
root,
|
||
unitName,
|
||
unit,
|
||
recipes,
|
||
extension,
|
||
title,
|
||
path,
|
||
}) => {
|
||
const unitProcedures = recipes
|
||
.flatMap((steps) => steps)
|
||
.flatMap((steps) => steps)
|
||
.filter(
|
||
({
|
||
extension: e,
|
||
unit,
|
||
}) => e.toUpperCase() === extension
|
||
&& (unit === unitName),
|
||
)
|
||
.flatMap(
|
||
({
|
||
recipes: r,
|
||
}) => r.map(
|
||
({
|
||
fileName,
|
||
name,
|
||
}) => ({
|
||
fileName,
|
||
name,
|
||
}),
|
||
),
|
||
);
|
||
|
||
if (unitProcedures.length) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/units/${unit}.md`,
|
||
`\n## ${title}\n\n`,
|
||
)
|
||
for (const { name } of unitProcedures) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/units/${unit}.md`,
|
||
`* [${base_name(name)}](../${path}/${base_name(name)}.md)\n`,
|
||
)
|
||
}
|
||
}
|
||
});
|
||
;// CONCATENATED MODULE: ./src/recipe/link-equipment-modules/index.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const link_equipment_modules = (async ({
|
||
recipes,
|
||
modelFile,
|
||
root,
|
||
}) => {
|
||
const areaModel = JSON.parse(
|
||
lib_default().xml2json(
|
||
await external_fs_namespaceObject.promises.readFile(
|
||
modelFile,
|
||
'utf8',
|
||
),
|
||
{ compact: false, spaces: 2 })
|
||
)
|
||
|
||
|
||
for (
|
||
const unit of
|
||
lib_elements(
|
||
lib_elements(
|
||
areaModel,
|
||
'AreaModel',
|
||
)[0],
|
||
'Unit',
|
||
)
|
||
) {
|
||
const unitName = texts(
|
||
unit,
|
||
'UniqueName',
|
||
)[0];
|
||
const unitClass = texts(
|
||
unit,
|
||
'Class',
|
||
)[0];
|
||
|
||
await unit_link({
|
||
root,
|
||
unitName: unitClass,
|
||
unit: unitName,
|
||
recipes,
|
||
extension: '.UXML',
|
||
title: `unit procedures (via unit class ${unitClass})`,
|
||
path: 'unit-procedures',
|
||
})
|
||
await unit_link({
|
||
root,
|
||
unitName,
|
||
unit: unitName,
|
||
recipes,
|
||
extension: '.UXML',
|
||
title: 'unit procedures',
|
||
path: 'unit-procedures',
|
||
});
|
||
|
||
await unit_link({
|
||
root,
|
||
unitName: unitClass,
|
||
unit: unitName,
|
||
recipes,
|
||
extension: '.OXML',
|
||
title: `operations (via unit class ${unitClass})`,
|
||
path: 'operations',
|
||
});
|
||
await unit_link({
|
||
root,
|
||
unitName,
|
||
unit: unitName,
|
||
recipes,
|
||
extension: '.OXML',
|
||
title: 'operations',
|
||
path: 'operations',
|
||
});
|
||
}
|
||
|
||
for (
|
||
const equipmentModule of
|
||
lib_elements(
|
||
lib_elements(
|
||
areaModel,
|
||
'AreaModel',
|
||
)[0],
|
||
'EquipmentModule',
|
||
)
|
||
) {
|
||
const equipmentModuleName = texts(
|
||
equipmentModule,
|
||
'UniqueName',
|
||
)[0];
|
||
const recipePhaseName = texts(
|
||
equipmentModule,
|
||
'RecipePhase',
|
||
)[0];
|
||
const unit = lib_elements(
|
||
lib_elements(
|
||
areaModel,
|
||
'AreaModel',
|
||
)[0],
|
||
'Unit',
|
||
).find(
|
||
(u) => texts(
|
||
u,
|
||
'ConfiguredEquipmentModuleName',
|
||
).find(
|
||
(name) => name === equipmentModuleName,
|
||
)
|
||
);
|
||
|
||
if (unit) {
|
||
const unitName = texts(
|
||
unit,
|
||
'UniqueName',
|
||
)[0];
|
||
const unitClass = texts(
|
||
unit,
|
||
'Class',
|
||
)[0];
|
||
|
||
const operations = recipes
|
||
.flatMap((steps) => steps)
|
||
.flatMap((steps) => steps)
|
||
.filter(
|
||
({
|
||
extension,
|
||
unit,
|
||
}) => extension.toUpperCase() === '.OXML'
|
||
&& (unit === unitName || unit === unitClass),
|
||
)
|
||
.flatMap(
|
||
({
|
||
recipes: r,
|
||
}) => r.filter(
|
||
({ steps }) => steps.flatMap(steps => steps).indexOf(recipePhaseName) >= 0,
|
||
).map(
|
||
({
|
||
fileName,
|
||
name,
|
||
}) => ({
|
||
fileName,
|
||
name,
|
||
}),
|
||
),
|
||
);
|
||
if (operations.length) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/equipment-modules/${equipmentModuleName}.md`,
|
||
`\n## operations\n\n`,
|
||
)
|
||
for (const { name } of operations) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
`${root}/equipment-modules/${equipmentModuleName}.md`,
|
||
`* [${base_name(name)}](../operations/${base_name(name)}.md)\n`,
|
||
)
|
||
}
|
||
}
|
||
}
|
||
}
|
||
});
|
||
;// CONCATENATED MODULE: ./src/recipe/index.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const recipe = (async (
|
||
root,
|
||
recipes,
|
||
modelFile,
|
||
) => {
|
||
await root_readme(root);
|
||
|
||
const result = await recipe_files(recipes)
|
||
.then(make_units_recipes)
|
||
.then(
|
||
(unitsRecipes) => {
|
||
return sequence(
|
||
Object.keys(
|
||
unitsRecipes,
|
||
)
|
||
.sort(
|
||
(a, b) => a.localeCompare(b),
|
||
)
|
||
.map(
|
||
(key) => async () => {
|
||
const [unit, classBased] = key.split(',');
|
||
const unitRecipes = unitsRecipes[key]
|
||
return sequence(
|
||
Object.keys(
|
||
unitRecipes
|
||
)
|
||
.sort(
|
||
(a, b) => a.localeCompare(b),
|
||
)
|
||
.map(
|
||
(extension) => async () => {
|
||
const uPath = classBased === 'true' ? 'unit-classes' : 'units';
|
||
return {
|
||
root,
|
||
extension,
|
||
unit,
|
||
uPath,
|
||
recipes: recipes_data({
|
||
recipes: unitRecipes[extension],
|
||
root,
|
||
extension,
|
||
}),
|
||
}
|
||
}
|
||
)
|
||
)
|
||
}
|
||
)
|
||
)
|
||
}
|
||
);
|
||
await write({
|
||
recipes: result
|
||
});
|
||
await link_recipes({
|
||
root,
|
||
recipes: result,
|
||
});
|
||
await link_equipment_modules({
|
||
modelFile,
|
||
root,
|
||
recipes: result,
|
||
});
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/get-opc.js
|
||
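// Resolves an OPC item to its PLC address: walks channel -> device -> nested tag
// groups -> tag in the exported server project and returns servermain.TAG_ADDRESS.
// Routes that start with INSTRUCTIONS return undefined; any missing node throws
// with the full route in the message.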
/* harmony default export */ const get_opc = ((opc, path, address) => {
|
||
const route = [
|
||
...path.split('.'),
|
||
...address.split('.')
|
||
]
|
||
if (route[0] === 'INSTRUCTIONS') {
|
||
return
|
||
}
|
||
|
||
const channel = opc
|
||
.project
|
||
.channels
|
||
.find(
|
||
({ "common.ALLTYPES_NAME": name }) =>
|
||
name.toUpperCase() === route[0].toUpperCase()
|
||
)
|
||
if (channel) {
|
||
const device = channel
|
||
.devices
|
||
.find(
|
||
({ "common.ALLTYPES_NAME": name }) =>
|
||
name.toUpperCase() === route[1].toUpperCase()
|
||
)
|
||
if (device) {
|
||
const group = route.slice(2, route.length - 1)
|
||
.reduce(
|
||
(current, _group) =>
|
||
current
|
||
.tag_groups
|
||
.find(
|
||
({ "common.ALLTYPES_NAME": name }) =>
|
||
name.toUpperCase() === _group.toUpperCase()
|
||
),
|
||
device
|
||
)
|
||
if (group) {
|
||
const address =
|
||
group
|
||
.tags
|
||
.find(
|
||
({ "common.ALLTYPES_NAME": name }) =>
|
||
name.toUpperCase() === route[route.length -1].toUpperCase()
|
||
)
|
||
if (address) {
|
||
return address['servermain.TAG_ADDRESS']
|
||
} else {
|
||
throw new Error(`no tag found for ${route.join(',')}`)
|
||
}
|
||
} else {
|
||
throw new Error(`no tag group found for ${route.join(',')}`)
|
||
}
|
||
} else {
|
||
throw new Error(`no device ${route[1]} for channel ${route[0]}`)
|
||
}
|
||
|
||
} else {
|
||
throw new Error(`no channel ${route[0]} for ${route.join(',')}`)
|
||
}
|
||
});
|
||
;// CONCATENATED MODULE: ./src/model/lib/write-table.js
|
||
|
||
|
||
|
||
/* harmony default export */ const write_table = (async (
|
||
title,
|
||
fileName,
|
||
list,
|
||
opc,
|
||
) => {
|
||
if (!list.length) {
|
||
return external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`
|
||
${title}
|
||
|
||
none
|
||
`,
|
||
)
|
||
}
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`
|
||
${title}
|
||
|
||
|name |id |type |engineering units|Server |Path |Address | opc |
|
||
|--------------------|-----|-----------|-----------------|--------------------|--------------------|--------------------|-----------------|
|
||
`
|
||
)
|
||
|
||
for (const { class: _class, parameter } of list) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${_class.Name.padEnd(20)}`
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${_class.ID.padEnd(5)}`
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${_class.Type.padEnd(11)}`
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${(_class?.EngineeringUnits || '').padEnd(16)}`
|
||
)
|
||
if (parameter) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${parameter.Server.padEnd(20)}`
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${parameter.ReadAccessPath.padEnd(20)}`
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${parameter.ReadItemName.padEnd(20)}`
|
||
)
|
||
if(opc) {
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
`|${(get_opc(
|
||
opc,
|
||
parameter.ReadAccessPath,
|
||
parameter.ReadItemName,
|
||
) || '').padEnd(20)}`
|
||
)
|
||
}
|
||
}
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
fileName,
|
||
'\n'
|
||
)
|
||
}
|
||
|
||
});
|
||
;// CONCATENATED MODULE: ./src/model/unit/attributes.js
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
/* harmony default export */ const attributes = (async ({
|
||
areaModel,
|
||
opc,
|
||
unit,
|
||
unitName,
|
||
path,
|
||
}) => {
|
||
const unitAttributes = lib_elements(
|
||
unit,
|
||
'Tag',
|
||
)
|
||
.map(
|
||
({
|
||
elements
|
||
}) => elements,
|
||
)
|
||
.map(get_attributes)
|
||
.map(
|
||
tag => ({
|
||
class: get_attributes(
|
||
lib_elements(
|
||
lib_elements(
|
||
areaModel,
|
||
'AreaModel',
|
||
)[0],
|
||
'TagClass',
|
||
)
|
||
.find(
|
||
(tagClass) => texts(
|
||
tagClass,
|
||
'UniqueName',
|
||
)[0] === tag.TagClass
|
||
).elements,
|
||
),
|
||
parameter: tag,
|
||
})
|
||
).map(
|
||
({
|
||
class: _class,
|
||
parameter
|
||
}) => ({
|
||
class: {
|
||
..._class,
|
||
Name: _class.UniqueName,
|
||
ID: '',
|
||
},
|
||
parameter: {
|
||
...parameter,
|
||
Name: parameter.UniqueName
|
||
}
|
||
})
|
||
)
|
||
|
||
await write_attributes(unit, path)
|
||
|
||
await write_table(
|
||
'## unit attributes',
|
||
path,
|
||
unitAttributes,
|
||
opc,
|
||
)
|
||
await external_fs_namespaceObject.promises.appendFile(
|
||
path,
|
||
'\n'
|
||
)
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./node_modules/js-yaml/dist/js-yaml.mjs
|
||
|
||
/*! js-yaml 4.1.0 https://github.com/nodeca/js-yaml @license MIT */
|
||
function isNothing(subject) {
|
||
return (typeof subject === 'undefined') || (subject === null);
|
||
}
|
||
|
||
|
||
function isObject(subject) {
|
||
return (typeof subject === 'object') && (subject !== null);
|
||
}
|
||
|
||
|
||
function toArray(sequence) {
|
||
if (Array.isArray(sequence)) return sequence;
|
||
else if (isNothing(sequence)) return [];
|
||
|
||
return [ sequence ];
|
||
}
|
||
|
||
|
||
function extend(target, source) {
|
||
var index, length, key, sourceKeys;
|
||
|
||
if (source) {
|
||
sourceKeys = Object.keys(source);
|
||
|
||
for (index = 0, length = sourceKeys.length; index < length; index += 1) {
|
||
key = sourceKeys[index];
|
||
target[key] = source[key];
|
||
}
|
||
}
|
||
|
||
return target;
|
||
}
|
||
|
||
|
||
function repeat(string, count) {
|
||
var result = '', cycle;
|
||
|
||
for (cycle = 0; cycle < count; cycle += 1) {
|
||
result += string;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
|
||
function isNegativeZero(number) {
|
||
return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number);
|
||
}
|
||
|
||
|
||
var isNothing_1 = isNothing;
|
||
var isObject_1 = isObject;
|
||
var toArray_1 = toArray;
|
||
var repeat_1 = repeat;
|
||
var isNegativeZero_1 = isNegativeZero;
|
||
var extend_1 = extend;
|
||
|
||
var common = {
|
||
isNothing: isNothing_1,
|
||
isObject: isObject_1,
|
||
toArray: toArray_1,
|
||
repeat: repeat_1,
|
||
isNegativeZero: isNegativeZero_1,
|
||
extend: extend_1
|
||
};
|
||
|
||
// YAML error class. http://stackoverflow.com/questions/8458984
|
||
|
||
|
||
function formatError(exception, compact) {
|
||
var where = '', message = exception.reason || '(unknown reason)';
|
||
|
||
if (!exception.mark) return message;
|
||
|
||
if (exception.mark.name) {
|
||
where += 'in "' + exception.mark.name + '" ';
|
||
}
|
||
|
||
where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')';
|
||
|
||
if (!compact && exception.mark.snippet) {
|
||
where += '\n\n' + exception.mark.snippet;
|
||
}
|
||
|
||
return message + ' ' + where;
|
||
}
|
||
|
||
|
||
function YAMLException$1(reason, mark) {
|
||
// Super constructor
|
||
Error.call(this);
|
||
|
||
this.name = 'YAMLException';
|
||
this.reason = reason;
|
||
this.mark = mark;
|
||
this.message = formatError(this, false);
|
||
|
||
// Include stack trace in error object
|
||
if (Error.captureStackTrace) {
|
||
// Chrome and NodeJS
|
||
Error.captureStackTrace(this, this.constructor);
|
||
} else {
|
||
// FF, IE 10+ and Safari 6+. Fallback for others
|
||
this.stack = (new Error()).stack || '';
|
||
}
|
||
}
|
||
|
||
|
||
// Inherit from Error
|
||
YAMLException$1.prototype = Object.create(Error.prototype);
|
||
YAMLException$1.prototype.constructor = YAMLException$1;
|
||
|
||
|
||
YAMLException$1.prototype.toString = function toString(compact) {
|
||
return this.name + ': ' + formatError(this, compact);
|
||
};
|
||
|
||
|
||
var exception = YAMLException$1;
|
||
|
||
// get snippet for a single line, respecting maxLength
|
||
function getLine(buffer, lineStart, lineEnd, position, maxLineLength) {
|
||
var head = '';
|
||
var tail = '';
|
||
var maxHalfLength = Math.floor(maxLineLength / 2) - 1;
|
||
|
||
if (position - lineStart > maxHalfLength) {
|
||
head = ' ... ';
|
||
lineStart = position - maxHalfLength + head.length;
|
||
}
|
||
|
||
if (lineEnd - position > maxHalfLength) {
|
||
tail = ' ...';
|
||
lineEnd = position + maxHalfLength - tail.length;
|
||
}
|
||
|
||
return {
|
||
str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail,
|
||
pos: position - lineStart + head.length // relative position
|
||
};
|
||
}
|
||
|
||
|
||
function padStart(string, max) {
|
||
return common.repeat(' ', max - string.length) + string;
|
||
}
|
||
|
||
|
||
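// Build a multi-line error snippet around mark.position: up to
// options.linesBefore/linesAfter lines of context, each prefixed with its
// line number, plus a caret line pointing at the failing column.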
function makeSnippet(mark, options) {
|
||
options = Object.create(options || null);
|
||
|
||
if (!mark.buffer) return null;
|
||
|
||
if (!options.maxLength) options.maxLength = 79;
|
||
if (typeof options.indent !== 'number') options.indent = 1;
|
||
if (typeof options.linesBefore !== 'number') options.linesBefore = 3;
|
||
if (typeof options.linesAfter !== 'number') options.linesAfter = 2;
|
||
|
||
var re = /\r?\n|\r|\0/g;
|
||
var lineStarts = [ 0 ];
|
||
var lineEnds = [];
|
||
var match;
|
||
var foundLineNo = -1;
|
||
|
||
while ((match = re.exec(mark.buffer))) {
|
||
lineEnds.push(match.index);
|
||
lineStarts.push(match.index + match[0].length);
|
||
|
||
if (mark.position <= match.index && foundLineNo < 0) {
|
||
foundLineNo = lineStarts.length - 2;
|
||
}
|
||
}
|
||
|
||
if (foundLineNo < 0) foundLineNo = lineStarts.length - 1;
|
||
|
||
var result = '', i, line;
|
||
var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length;
|
||
var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3);
|
||
|
||
for (i = 1; i <= options.linesBefore; i++) {
|
||
if (foundLineNo - i < 0) break;
|
||
line = getLine(
|
||
mark.buffer,
|
||
lineStarts[foundLineNo - i],
|
||
lineEnds[foundLineNo - i],
|
||
mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]),
|
||
maxLineLength
|
||
);
|
||
result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) +
|
||
' | ' + line.str + '\n' + result;
|
||
}
|
||
|
||
line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength);
|
||
result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) +
|
||
' | ' + line.str + '\n';
|
||
result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n';
|
||
|
||
for (i = 1; i <= options.linesAfter; i++) {
|
||
if (foundLineNo + i >= lineEnds.length) break;
|
||
line = getLine(
|
||
mark.buffer,
|
||
lineStarts[foundLineNo + i],
|
||
lineEnds[foundLineNo + i],
|
||
mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]),
|
||
maxLineLength
|
||
);
|
||
result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) +
|
||
' | ' + line.str + '\n';
|
||
}
|
||
|
||
return result.replace(/\n$/, '');
|
||
}
|
||
|
||
|
||
var snippet = makeSnippet;
|
||
|
||
var TYPE_CONSTRUCTOR_OPTIONS = [
|
||
'kind',
|
||
'multi',
|
||
'resolve',
|
||
'construct',
|
||
'instanceOf',
|
||
'predicate',
|
||
'represent',
|
||
'representName',
|
||
'defaultStyle',
|
||
'styleAliases'
|
||
];
|
||
|
||
var YAML_NODE_KINDS = [
|
||
'scalar',
|
||
'sequence',
|
||
'mapping'
|
||
];
|
||
|
||
function compileStyleAliases(map) {
|
||
var result = {};
|
||
|
||
if (map !== null) {
|
||
Object.keys(map).forEach(function (style) {
|
||
map[style].forEach(function (alias) {
|
||
result[String(alias)] = style;
|
||
});
|
||
});
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
function Type$1(tag, options) {
|
||
options = options || {};
|
||
|
||
Object.keys(options).forEach(function (name) {
|
||
if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) {
|
||
throw new exception('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.');
|
||
}
|
||
});
|
||
|
||
// TODO: Add tag format check.
|
||
this.options = options; // keep original options in case user wants to extend this type later
|
||
this.tag = tag;
|
||
this.kind = options['kind'] || null;
|
||
this.resolve = options['resolve'] || function () { return true; };
|
||
this.construct = options['construct'] || function (data) { return data; };
|
||
this.instanceOf = options['instanceOf'] || null;
|
||
this.predicate = options['predicate'] || null;
|
||
this.represent = options['represent'] || null;
|
||
this.representName = options['representName'] || null;
|
||
this.defaultStyle = options['defaultStyle'] || null;
|
||
this.multi = options['multi'] || false;
|
||
this.styleAliases = compileStyleAliases(options['styleAliases'] || null);
|
||
|
||
if (YAML_NODE_KINDS.indexOf(this.kind) === -1) {
|
||
throw new exception('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.');
|
||
}
|
||
}
|
||
|
||
var type = Type$1;
|
||
|
||
/*eslint-disable max-len*/
|
||
|
||
|
||
|
||
|
||
|
||
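// Flatten schema[name] ('implicit' or 'explicit') into a de-duplicated list:
// a later type with the same tag, kind and multi flag replaces an earlier one.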
function compileList(schema, name) {
|
||
var result = [];
|
||
|
||
schema[name].forEach(function (currentType) {
|
||
var newIndex = result.length;
|
||
|
||
result.forEach(function (previousType, previousIndex) {
|
||
if (previousType.tag === currentType.tag &&
|
||
previousType.kind === currentType.kind &&
|
||
previousType.multi === currentType.multi) {
|
||
|
||
newIndex = previousIndex;
|
||
}
|
||
});
|
||
|
||
result[newIndex] = currentType;
|
||
});
|
||
|
||
return result;
|
||
}
|
||
|
||
|
||
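// Index the given type lists by kind and tag (plus a 'fallback' bucket that
// spans all kinds); multi types are collected into per-kind arrays instead.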
function compileMap(/* lists... */) {
|
||
var result = {
|
||
scalar: {},
|
||
sequence: {},
|
||
mapping: {},
|
||
fallback: {},
|
||
multi: {
|
||
scalar: [],
|
||
sequence: [],
|
||
mapping: [],
|
||
fallback: []
|
||
}
|
||
}, index, length;
|
||
|
||
function collectType(type) {
|
||
if (type.multi) {
|
||
result.multi[type.kind].push(type);
|
||
result.multi['fallback'].push(type);
|
||
} else {
|
||
result[type.kind][type.tag] = result['fallback'][type.tag] = type;
|
||
}
|
||
}
|
||
|
||
for (index = 0, length = arguments.length; index < length; index += 1) {
|
||
arguments[index].forEach(collectType);
|
||
}
|
||
return result;
|
||
}
|
||
|
||
|
||
function Schema$1(definition) {
|
||
return this.extend(definition);
|
||
}
|
||
|
||
|
||
Schema$1.prototype.extend = function extend(definition) {
|
||
var implicit = [];
|
||
var explicit = [];
|
||
|
||
if (definition instanceof type) {
|
||
// Schema.extend(type)
|
||
explicit.push(definition);
|
||
|
||
} else if (Array.isArray(definition)) {
|
||
// Schema.extend([ type1, type2, ... ])
|
||
explicit = explicit.concat(definition);
|
||
|
||
} else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) {
|
||
// Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] })
|
||
if (definition.implicit) implicit = implicit.concat(definition.implicit);
|
||
if (definition.explicit) explicit = explicit.concat(definition.explicit);
|
||
|
||
} else {
|
||
throw new exception('Schema.extend argument should be a Type, [ Type ], ' +
|
||
'or a schema definition ({ implicit: [...], explicit: [...] })');
|
||
}
|
||
|
||
implicit.forEach(function (type$1) {
|
||
if (!(type$1 instanceof type)) {
|
||
throw new exception('Specified list of YAML types (or a single Type object) contains a non-Type object.');
|
||
}
|
||
|
||
if (type$1.loadKind && type$1.loadKind !== 'scalar') {
|
||
throw new exception('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.');
|
||
}
|
||
|
||
if (type$1.multi) {
|
||
throw new exception('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.');
|
||
}
|
||
});
|
||
|
||
explicit.forEach(function (type$1) {
|
||
if (!(type$1 instanceof type)) {
|
||
throw new exception('Specified list of YAML types (or a single Type object) contains a non-Type object.');
|
||
}
|
||
});
|
||
|
||
var result = Object.create(Schema$1.prototype);
|
||
|
||
result.implicit = (this.implicit || []).concat(implicit);
|
||
result.explicit = (this.explicit || []).concat(explicit);
|
||
|
||
result.compiledImplicit = compileList(result, 'implicit');
|
||
result.compiledExplicit = compileList(result, 'explicit');
|
||
result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit);
|
||
|
||
return result;
|
||
};
|
||
|
||
|
||
var schema = Schema$1;
|
||
|
||
var str = new type('tag:yaml.org,2002:str', {
|
||
kind: 'scalar',
|
||
construct: function (data) { return data !== null ? data : ''; }
|
||
});
|
||
|
||
var seq = new type('tag:yaml.org,2002:seq', {
|
||
kind: 'sequence',
|
||
construct: function (data) { return data !== null ? data : []; }
|
||
});
|
||
|
||
var map = new type('tag:yaml.org,2002:map', {
|
||
kind: 'mapping',
|
||
construct: function (data) { return data !== null ? data : {}; }
|
||
});
|
||
|
||
var failsafe = new schema({
|
||
explicit: [
|
||
str,
|
||
seq,
|
||
map
|
||
]
|
||
});
|
||
|
||
function resolveYamlNull(data) {
|
||
if (data === null) return true;
|
||
|
||
var max = data.length;
|
||
|
||
return (max === 1 && data === '~') ||
|
||
(max === 4 && (data === 'null' || data === 'Null' || data === 'NULL'));
|
||
}
|
||
|
||
function constructYamlNull() {
|
||
return null;
|
||
}
|
||
|
||
function isNull(object) {
|
||
return object === null;
|
||
}
|
||
|
||
var _null = new type('tag:yaml.org,2002:null', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlNull,
|
||
construct: constructYamlNull,
|
||
predicate: isNull,
|
||
represent: {
|
||
canonical: function () { return '~'; },
|
||
lowercase: function () { return 'null'; },
|
||
uppercase: function () { return 'NULL'; },
|
||
camelcase: function () { return 'Null'; },
|
||
empty: function () { return ''; }
|
||
},
|
||
defaultStyle: 'lowercase'
|
||
});
|
||
|
||
function resolveYamlBoolean(data) {
|
||
if (data === null) return false;
|
||
|
||
var max = data.length;
|
||
|
||
return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) ||
|
||
(max === 5 && (data === 'false' || data === 'False' || data === 'FALSE'));
|
||
}
|
||
|
||
function constructYamlBoolean(data) {
|
||
return data === 'true' ||
|
||
data === 'True' ||
|
||
data === 'TRUE';
|
||
}
|
||
|
||
function isBoolean(object) {
|
||
return Object.prototype.toString.call(object) === '[object Boolean]';
|
||
}
|
||
|
||
var bool = new type('tag:yaml.org,2002:bool', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlBoolean,
|
||
construct: constructYamlBoolean,
|
||
predicate: isBoolean,
|
||
represent: {
|
||
lowercase: function (object) { return object ? 'true' : 'false'; },
|
||
uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; },
|
||
camelcase: function (object) { return object ? 'True' : 'False'; }
|
||
},
|
||
defaultStyle: 'lowercase'
|
||
});
|
||
|
||
function isHexCode(c) {
|
||
return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) ||
|
||
((0x41/* A */ <= c) && (c <= 0x46/* F */)) ||
|
||
((0x61/* a */ <= c) && (c <= 0x66/* f */));
|
||
}
|
||
|
||
function isOctCode(c) {
|
||
return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */));
|
||
}
|
||
|
||
function isDecCode(c) {
|
||
return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */));
|
||
}
|
||
|
||
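// Accept YAML 1.2 core-schema integers: an optional sign followed by a
// decimal number or a 0b/0x/0o binary/hex/octal literal, with '_' allowed
// as a digit separator (but not at the start or end).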
function resolveYamlInteger(data) {
|
||
if (data === null) return false;
|
||
|
||
var max = data.length,
|
||
index = 0,
|
||
hasDigits = false,
|
||
ch;
|
||
|
||
if (!max) return false;
|
||
|
||
ch = data[index];
|
||
|
||
// sign
|
||
if (ch === '-' || ch === '+') {
|
||
ch = data[++index];
|
||
}
|
||
|
||
if (ch === '0') {
|
||
// 0
|
||
if (index + 1 === max) return true;
|
||
ch = data[++index];
|
||
|
||
// base 2, base 8, base 16
|
||
|
||
if (ch === 'b') {
|
||
// base 2
|
||
index++;
|
||
|
||
for (; index < max; index++) {
|
||
ch = data[index];
|
||
if (ch === '_') continue;
|
||
if (ch !== '0' && ch !== '1') return false;
|
||
hasDigits = true;
|
||
}
|
||
return hasDigits && ch !== '_';
|
||
}
|
||
|
||
|
||
if (ch === 'x') {
|
||
// base 16
|
||
index++;
|
||
|
||
for (; index < max; index++) {
|
||
ch = data[index];
|
||
if (ch === '_') continue;
|
||
if (!isHexCode(data.charCodeAt(index))) return false;
|
||
hasDigits = true;
|
||
}
|
||
return hasDigits && ch !== '_';
|
||
}
|
||
|
||
|
||
if (ch === 'o') {
|
||
// base 8
|
||
index++;
|
||
|
||
for (; index < max; index++) {
|
||
ch = data[index];
|
||
if (ch === '_') continue;
|
||
if (!isOctCode(data.charCodeAt(index))) return false;
|
||
hasDigits = true;
|
||
}
|
||
return hasDigits && ch !== '_';
|
||
}
|
||
}
|
||
|
||
// base 10 (except 0)
|
||
|
||
// value should not start with `_`;
|
||
if (ch === '_') return false;
|
||
|
||
for (; index < max; index++) {
|
||
ch = data[index];
|
||
if (ch === '_') continue;
|
||
if (!isDecCode(data.charCodeAt(index))) {
|
||
return false;
|
||
}
|
||
hasDigits = true;
|
||
}
|
||
|
||
// Should have digits and should not end with `_`
|
||
if (!hasDigits || ch === '_') return false;
|
||
|
||
return true;
|
||
}
|
||
|
||
function constructYamlInteger(data) {
|
||
var value = data, sign = 1, ch;
|
||
|
||
if (value.indexOf('_') !== -1) {
|
||
value = value.replace(/_/g, '');
|
||
}
|
||
|
||
ch = value[0];
|
||
|
||
if (ch === '-' || ch === '+') {
|
||
if (ch === '-') sign = -1;
|
||
value = value.slice(1);
|
||
ch = value[0];
|
||
}
|
||
|
||
if (value === '0') return 0;
|
||
|
||
if (ch === '0') {
|
||
if (value[1] === 'b') return sign * parseInt(value.slice(2), 2);
|
||
if (value[1] === 'x') return sign * parseInt(value.slice(2), 16);
|
||
if (value[1] === 'o') return sign * parseInt(value.slice(2), 8);
|
||
}
|
||
|
||
return sign * parseInt(value, 10);
|
||
}
|
||
|
||
function isInteger(object) {
|
||
return (Object.prototype.toString.call(object)) === '[object Number]' &&
|
||
(object % 1 === 0 && !common.isNegativeZero(object));
|
||
}
|
||
|
||
var js_yaml_int = new type('tag:yaml.org,2002:int', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlInteger,
|
||
construct: constructYamlInteger,
|
||
predicate: isInteger,
|
||
represent: {
|
||
binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); },
|
||
octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); },
|
||
decimal: function (obj) { return obj.toString(10); },
|
||
/* eslint-disable max-len */
|
||
hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); }
|
||
},
|
||
defaultStyle: 'decimal',
|
||
styleAliases: {
|
||
binary: [ 2, 'bin' ],
|
||
octal: [ 8, 'oct' ],
|
||
decimal: [ 10, 'dec' ],
|
||
hexadecimal: [ 16, 'hex' ]
|
||
}
|
||
});
|
||
|
||
var YAML_FLOAT_PATTERN = new RegExp(
|
||
// 2.5e4, 2.5 and integers
|
||
'^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' +
|
||
// .2e4, .2
|
||
// special case; it does not seem to come from the spec
|
||
'|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' +
|
||
// .inf
|
||
'|[-+]?\\.(?:inf|Inf|INF)' +
|
||
// .nan
|
||
'|\\.(?:nan|NaN|NAN))$');
|
||
|
||
function resolveYamlFloat(data) {
|
||
if (data === null) return false;
|
||
|
||
if (!YAML_FLOAT_PATTERN.test(data) ||
|
||
// Quick hack to not allow integers to end with `_`
|
||
// Probably should update regexp & check speed
|
||
data[data.length - 1] === '_') {
|
||
return false;
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
function constructYamlFloat(data) {
|
||
var value, sign;
|
||
|
||
value = data.replace(/_/g, '').toLowerCase();
|
||
sign = value[0] === '-' ? -1 : 1;
|
||
|
||
if ('+-'.indexOf(value[0]) >= 0) {
|
||
value = value.slice(1);
|
||
}
|
||
|
||
if (value === '.inf') {
|
||
return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;
|
||
|
||
} else if (value === '.nan') {
|
||
return NaN;
|
||
}
|
||
return sign * parseFloat(value, 10);
|
||
}
|
||
|
||
|
||
var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;
|
||
|
||
function representYamlFloat(object, style) {
|
||
var res;
|
||
|
||
if (isNaN(object)) {
|
||
switch (style) {
|
||
case 'lowercase': return '.nan';
|
||
case 'uppercase': return '.NAN';
|
||
case 'camelcase': return '.NaN';
|
||
}
|
||
} else if (Number.POSITIVE_INFINITY === object) {
|
||
switch (style) {
|
||
case 'lowercase': return '.inf';
|
||
case 'uppercase': return '.INF';
|
||
case 'camelcase': return '.Inf';
|
||
}
|
||
} else if (Number.NEGATIVE_INFINITY === object) {
|
||
switch (style) {
|
||
case 'lowercase': return '-.inf';
|
||
case 'uppercase': return '-.INF';
|
||
case 'camelcase': return '-.Inf';
|
||
}
|
||
} else if (common.isNegativeZero(object)) {
|
||
return '-0.0';
|
||
}
|
||
|
||
res = object.toString(10);
|
||
|
||
// JS stringifier can build scientific format without dots: 5e-100,
|
||
// while YAML requires a dot: 5.e-100. Fix it with a simple hack
|
||
|
||
return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res;
|
||
}
|
||
|
||
function isFloat(object) {
|
||
return (Object.prototype.toString.call(object) === '[object Number]') &&
|
||
(object % 1 !== 0 || common.isNegativeZero(object));
|
||
}
|
||
|
||
var js_yaml_float = new type('tag:yaml.org,2002:float', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlFloat,
|
||
construct: constructYamlFloat,
|
||
predicate: isFloat,
|
||
represent: representYamlFloat,
|
||
defaultStyle: 'lowercase'
|
||
});
|
||
|
||
var json = failsafe.extend({
|
||
implicit: [
|
||
_null,
|
||
bool,
|
||
js_yaml_int,
|
||
js_yaml_float
|
||
]
|
||
});
|
||
|
||
var core = json;
|
||
|
||
var YAML_DATE_REGEXP = new RegExp(
|
||
'^([0-9][0-9][0-9][0-9])' + // [1] year
|
||
'-([0-9][0-9])' + // [2] month
|
||
'-([0-9][0-9])$'); // [3] day
|
||
|
||
var YAML_TIMESTAMP_REGEXP = new RegExp(
|
||
'^([0-9][0-9][0-9][0-9])' + // [1] year
|
||
'-([0-9][0-9]?)' + // [2] month
|
||
'-([0-9][0-9]?)' + // [3] day
|
||
'(?:[Tt]|[ \\t]+)' + // ...
|
||
'([0-9][0-9]?)' + // [4] hour
|
||
':([0-9][0-9])' + // [5] minute
|
||
':([0-9][0-9])' + // [6] second
|
||
'(?:\\.([0-9]*))?' + // [7] fraction
|
||
'(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour
|
||
'(?::([0-9][0-9]))?))?$'); // [11] tz_minute
|
||
|
||
function resolveYamlTimestamp(data) {
|
||
if (data === null) return false;
|
||
if (YAML_DATE_REGEXP.exec(data) !== null) return true;
|
||
if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true;
|
||
return false;
|
||
}
|
||
|
||
function constructYamlTimestamp(data) {
|
||
var match, year, month, day, hour, minute, second, fraction = 0,
|
||
delta = null, tz_hour, tz_minute, date;
|
||
|
||
match = YAML_DATE_REGEXP.exec(data);
|
||
if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data);
|
||
|
||
if (match === null) throw new Error('Date resolve error');
|
||
|
||
// match: [1] year [2] month [3] day
|
||
|
||
year = +(match[1]);
|
||
month = +(match[2]) - 1; // JS month starts with 0
|
||
day = +(match[3]);
|
||
|
||
if (!match[4]) { // no hour
|
||
return new Date(Date.UTC(year, month, day));
|
||
}
|
||
|
||
// match: [4] hour [5] minute [6] second [7] fraction
|
||
|
||
hour = +(match[4]);
|
||
minute = +(match[5]);
|
||
second = +(match[6]);
|
||
|
||
if (match[7]) {
|
||
fraction = match[7].slice(0, 3);
|
||
while (fraction.length < 3) { // milliseconds
|
||
fraction += '0';
|
||
}
|
||
fraction = +fraction;
|
||
}
|
||
|
||
// match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute
|
||
|
||
if (match[9]) {
|
||
tz_hour = +(match[10]);
|
||
tz_minute = +(match[11] || 0);
|
||
delta = (tz_hour * 60 + tz_minute) * 60000; // delta in milliseconds
|
||
if (match[9] === '-') delta = -delta;
|
||
}
|
||
|
||
date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));
|
||
|
||
if (delta) date.setTime(date.getTime() - delta);
|
||
|
||
return date;
|
||
}
|
||
|
||
function representYamlTimestamp(object /*, style*/) {
|
||
return object.toISOString();
|
||
}
|
||
|
||
var timestamp = new type('tag:yaml.org,2002:timestamp', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlTimestamp,
|
||
construct: constructYamlTimestamp,
|
||
instanceOf: Date,
|
||
represent: representYamlTimestamp
|
||
});
|
||
|
||
function resolveYamlMerge(data) {
|
||
return data === '<<' || data === null;
|
||
}
|
||
|
||
var merge = new type('tag:yaml.org,2002:merge', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlMerge
|
||
});
|
||
|
||
/*eslint-disable no-bitwise*/
|
||
|
||
|
||
|
||
|
||
|
||
// [ 64, 65, 66 ] -> [ padding, CR, LF ]
|
||
var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r';
|
||
|
||
|
||
function resolveYamlBinary(data) {
|
||
if (data === null) return false;
|
||
|
||
var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP;
|
||
|
||
// Convert one by one.
|
||
for (idx = 0; idx < max; idx++) {
|
||
code = map.indexOf(data.charAt(idx));
|
||
|
||
// Skip CR/LF
|
||
if (code > 64) continue;
|
||
|
||
// Fail on illegal characters
|
||
if (code < 0) return false;
|
||
|
||
bitlen += 6;
|
||
}
|
||
|
||
// If there are any bits left, source was corrupted
|
||
return (bitlen % 8) === 0;
|
||
}
|
||
|
||
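// Decode base64 text into a Uint8Array, 4 characters (24 bits) at a time;
// CR/LF and '=' padding are stripped before scanning.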
function constructYamlBinary(data) {
|
||
var idx, tailbits,
|
||
input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan
|
||
max = input.length,
|
||
map = BASE64_MAP,
|
||
bits = 0,
|
||
result = [];
|
||
|
||
// Collect by 6*4 bits (3 bytes)
|
||
|
||
for (idx = 0; idx < max; idx++) {
|
||
if ((idx % 4 === 0) && idx) {
|
||
result.push((bits >> 16) & 0xFF);
|
||
result.push((bits >> 8) & 0xFF);
|
||
result.push(bits & 0xFF);
|
||
}
|
||
|
||
bits = (bits << 6) | map.indexOf(input.charAt(idx));
|
||
}
|
||
|
||
// Dump tail
|
||
|
||
tailbits = (max % 4) * 6;
|
||
|
||
if (tailbits === 0) {
|
||
result.push((bits >> 16) & 0xFF);
|
||
result.push((bits >> 8) & 0xFF);
|
||
result.push(bits & 0xFF);
|
||
} else if (tailbits === 18) {
|
||
result.push((bits >> 10) & 0xFF);
|
||
result.push((bits >> 2) & 0xFF);
|
||
} else if (tailbits === 12) {
|
||
result.push((bits >> 4) & 0xFF);
|
||
}
|
||
|
||
return new Uint8Array(result);
|
||
}
|
||
|
||
function representYamlBinary(object /*, style*/) {
|
||
var result = '', bits = 0, idx, tail,
|
||
max = object.length,
|
||
map = BASE64_MAP;
|
||
|
||
// Convert every three bytes to 4 ASCII characters.
|
||
|
||
for (idx = 0; idx < max; idx++) {
|
||
if ((idx % 3 === 0) && idx) {
|
||
result += map[(bits >> 18) & 0x3F];
|
||
result += map[(bits >> 12) & 0x3F];
|
||
result += map[(bits >> 6) & 0x3F];
|
||
result += map[bits & 0x3F];
|
||
}
|
||
|
||
bits = (bits << 8) + object[idx];
|
||
}
|
||
|
||
// Dump tail
|
||
|
||
tail = max % 3;
|
||
|
||
if (tail === 0) {
|
||
result += map[(bits >> 18) & 0x3F];
|
||
result += map[(bits >> 12) & 0x3F];
|
||
result += map[(bits >> 6) & 0x3F];
|
||
result += map[bits & 0x3F];
|
||
} else if (tail === 2) {
|
||
result += map[(bits >> 10) & 0x3F];
|
||
result += map[(bits >> 4) & 0x3F];
|
||
result += map[(bits << 2) & 0x3F];
|
||
result += map[64];
|
||
} else if (tail === 1) {
|
||
result += map[(bits >> 2) & 0x3F];
|
||
result += map[(bits << 4) & 0x3F];
|
||
result += map[64];
|
||
result += map[64];
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
function isBinary(obj) {
|
||
return Object.prototype.toString.call(obj) === '[object Uint8Array]';
|
||
}
|
||
|
||
var binary = new type('tag:yaml.org,2002:binary', {
|
||
kind: 'scalar',
|
||
resolve: resolveYamlBinary,
|
||
construct: constructYamlBinary,
|
||
predicate: isBinary,
|
||
represent: representYamlBinary
|
||
});
|
||
|
||
var _hasOwnProperty$3 = Object.prototype.hasOwnProperty;
|
||
var _toString$2 = Object.prototype.toString;
|
||
|
||
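// An !!omap node is valid only if it is a sequence of single-key objects
// with no repeated keys.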
function resolveYamlOmap(data) {
|
||
if (data === null) return true;
|
||
|
||
var objectKeys = [], index, length, pair, pairKey, pairHasKey,
|
||
object = data;
|
||
|
||
for (index = 0, length = object.length; index < length; index += 1) {
|
||
pair = object[index];
|
||
pairHasKey = false;
|
||
|
||
if (_toString$2.call(pair) !== '[object Object]') return false;
|
||
|
||
for (pairKey in pair) {
|
||
if (_hasOwnProperty$3.call(pair, pairKey)) {
|
||
if (!pairHasKey) pairHasKey = true;
|
||
else return false;
|
||
}
|
||
}
|
||
|
||
if (!pairHasKey) return false;
|
||
|
||
if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey);
|
||
else return false;
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
function constructYamlOmap(data) {
|
||
return data !== null ? data : [];
|
||
}
|
||
|
||
var omap = new type('tag:yaml.org,2002:omap', {
|
||
kind: 'sequence',
|
||
resolve: resolveYamlOmap,
|
||
construct: constructYamlOmap
|
||
});
|
||
|
||
var _toString$1 = Object.prototype.toString;
|
||
|
||
function resolveYamlPairs(data) {
|
||
if (data === null) return true;
|
||
|
||
var index, length, pair, keys, result,
|
||
object = data;
|
||
|
||
result = new Array(object.length);
|
||
|
||
for (index = 0, length = object.length; index < length; index += 1) {
|
||
pair = object[index];
|
||
|
||
if (_toString$1.call(pair) !== '[object Object]') return false;
|
||
|
||
keys = Object.keys(pair);
|
||
|
||
if (keys.length !== 1) return false;
|
||
|
||
result[index] = [ keys[0], pair[keys[0]] ];
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
function constructYamlPairs(data) {
|
||
if (data === null) return [];
|
||
|
||
var index, length, pair, keys, result,
|
||
object = data;
|
||
|
||
result = new Array(object.length);
|
||
|
||
for (index = 0, length = object.length; index < length; index += 1) {
|
||
pair = object[index];
|
||
|
||
keys = Object.keys(pair);
|
||
|
||
result[index] = [ keys[0], pair[keys[0]] ];
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
var pairs = new type('tag:yaml.org,2002:pairs', {
|
||
kind: 'sequence',
|
||
resolve: resolveYamlPairs,
|
||
construct: constructYamlPairs
|
||
});
|
||
|
||
var _hasOwnProperty$2 = Object.prototype.hasOwnProperty;
|
||
|
||
function resolveYamlSet(data) {
|
||
if (data === null) return true;
|
||
|
||
var key, object = data;
|
||
|
||
for (key in object) {
|
||
if (_hasOwnProperty$2.call(object, key)) {
|
||
if (object[key] !== null) return false;
|
||
}
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
function constructYamlSet(data) {
|
||
return data !== null ? data : {};
|
||
}
|
||
|
||
var set = new type('tag:yaml.org,2002:set', {
|
||
kind: 'mapping',
|
||
resolve: resolveYamlSet,
|
||
construct: constructYamlSet
|
||
});
|
||
|
||
var _default = core.extend({
|
||
implicit: [
|
||
timestamp,
|
||
merge
|
||
],
|
||
explicit: [
|
||
binary,
|
||
omap,
|
||
pairs,
|
||
set
|
||
]
|
||
});
|
||
|
||
/*eslint-disable max-len,no-use-before-define*/
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
var _hasOwnProperty$1 = Object.prototype.hasOwnProperty;
|
||
|
||
|
||
var CONTEXT_FLOW_IN = 1;
|
||
var CONTEXT_FLOW_OUT = 2;
|
||
var CONTEXT_BLOCK_IN = 3;
|
||
var CONTEXT_BLOCK_OUT = 4;
|
||
|
||
|
||
var CHOMPING_CLIP = 1;
|
||
var CHOMPING_STRIP = 2;
|
||
var CHOMPING_KEEP = 3;
|
||
|
||
|
||
var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/;
|
||
var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/;
|
||
var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/;
|
||
var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i;
|
||
var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i;
|
||
|
||
|
||
function _class(obj) { return Object.prototype.toString.call(obj); }
|
||
|
||
function is_EOL(c) {
|
||
return (c === 0x0A/* LF */) || (c === 0x0D/* CR */);
|
||
}
|
||
|
||
function is_WHITE_SPACE(c) {
|
||
return (c === 0x09/* Tab */) || (c === 0x20/* Space */);
|
||
}
|
||
|
||
function is_WS_OR_EOL(c) {
|
||
return (c === 0x09/* Tab */) ||
|
||
(c === 0x20/* Space */) ||
|
||
(c === 0x0A/* LF */) ||
|
||
(c === 0x0D/* CR */);
|
||
}
|
||
|
||
function is_FLOW_INDICATOR(c) {
|
||
return c === 0x2C/* , */ ||
|
||
c === 0x5B/* [ */ ||
|
||
c === 0x5D/* ] */ ||
|
||
c === 0x7B/* { */ ||
|
||
c === 0x7D/* } */;
|
||
}
|
||
|
||
function fromHexCode(c) {
|
||
var lc;
|
||
|
||
if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) {
|
||
return c - 0x30;
|
||
}
|
||
|
||
/*eslint-disable no-bitwise*/
|
||
lc = c | 0x20;
|
||
|
||
if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) {
|
||
return lc - 0x61 + 10;
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
|
||
function escapedHexLen(c) {
|
||
if (c === 0x78/* x */) { return 2; }
|
||
if (c === 0x75/* u */) { return 4; }
|
||
if (c === 0x55/* U */) { return 8; }
|
||
return 0;
|
||
}
|
||
|
||
function fromDecimalCode(c) {
|
||
if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) {
|
||
return c - 0x30;
|
||
}
|
||
|
||
return -1;
|
||
}
|
||
|
||
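// Map the character code after a backslash in a double-quoted scalar to the
// character it denotes; returns '' for codes that are not simple escapes.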
function simpleEscapeSequence(c) {
|
||
/* eslint-disable indent */
|
||
return (c === 0x30/* 0 */) ? '\x00' :
|
||
(c === 0x61/* a */) ? '\x07' :
|
||
(c === 0x62/* b */) ? '\x08' :
|
||
(c === 0x74/* t */) ? '\x09' :
|
||
(c === 0x09/* Tab */) ? '\x09' :
|
||
(c === 0x6E/* n */) ? '\x0A' :
|
||
(c === 0x76/* v */) ? '\x0B' :
|
||
(c === 0x66/* f */) ? '\x0C' :
|
||
(c === 0x72/* r */) ? '\x0D' :
|
||
(c === 0x65/* e */) ? '\x1B' :
|
||
(c === 0x20/* Space */) ? ' ' :
|
||
(c === 0x22/* " */) ? '\x22' :
|
||
(c === 0x2F/* / */) ? '/' :
|
||
(c === 0x5C/* \ */) ? '\x5C' :
|
||
(c === 0x4E/* N */) ? '\x85' :
|
||
(c === 0x5F/* _ */) ? '\xA0' :
|
||
(c === 0x4C/* L */) ? '\u2028' :
|
||
(c === 0x50/* P */) ? '\u2029' : '';
|
||
}
|
||
|
||
function charFromCodepoint(c) {
|
||
if (c <= 0xFFFF) {
|
||
return String.fromCharCode(c);
|
||
}
|
||
// Encode UTF-16 surrogate pair
|
||
// https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF
|
||
return String.fromCharCode(
|
||
((c - 0x010000) >> 10) + 0xD800,
|
||
((c - 0x010000) & 0x03FF) + 0xDC00
|
||
);
|
||
}
|
||
|
||
var simpleEscapeCheck = new Array(256); // integer, for fast access
|
||
var simpleEscapeMap = new Array(256);
|
||
for (var i = 0; i < 256; i++) {
|
||
simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0;
|
||
simpleEscapeMap[i] = simpleEscapeSequence(i);
|
||
}
|
||
|
||
|
||
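// Loader state shared by all read* functions: the input buffer, resolved
// options, and the current read position (position/line/lineStart/lineIndent).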
function State$1(input, options) {
|
||
this.input = input;
|
||
|
||
this.filename = options['filename'] || null;
|
||
this.schema = options['schema'] || _default;
|
||
this.onWarning = options['onWarning'] || null;
|
||
// (Hidden) Remove? Makes the loader expect YAML 1.1 documents
|
||
// if such documents have no explicit %YAML directive
|
||
this.legacy = options['legacy'] || false;
|
||
|
||
this.json = options['json'] || false;
|
||
this.listener = options['listener'] || null;
|
||
|
||
this.implicitTypes = this.schema.compiledImplicit;
|
||
this.typeMap = this.schema.compiledTypeMap;
|
||
|
||
this.length = input.length;
|
||
this.position = 0;
|
||
this.line = 0;
|
||
this.lineStart = 0;
|
||
this.lineIndent = 0;
|
||
|
||
// position of first leading tab in the current line,
|
||
// used to make sure there are no tabs in the indentation
|
||
this.firstTabInLine = -1;
|
||
|
||
this.documents = [];
|
||
|
||
/*
|
||
this.version;
|
||
this.checkLineBreaks;
|
||
this.tagMap;
|
||
this.anchorMap;
|
||
this.tag;
|
||
this.anchor;
|
||
this.kind;
|
||
this.result;*/
|
||
|
||
}
|
||
|
||
|
||
function generateError(state, message) {
|
||
var mark = {
|
||
name: state.filename,
|
||
buffer: state.input.slice(0, -1), // omit trailing \0
|
||
position: state.position,
|
||
line: state.line,
|
||
column: state.position - state.lineStart
|
||
};
|
||
|
||
mark.snippet = snippet(mark);
|
||
|
||
return new exception(message, mark);
|
||
}
|
||
|
||
function throwError(state, message) {
|
||
throw generateError(state, message);
|
||
}
|
||
|
||
function throwWarning(state, message) {
|
||
if (state.onWarning) {
|
||
state.onWarning.call(null, generateError(state, message));
|
||
}
|
||
}
|
||
|
||
|
||
var directiveHandlers = {
|
||
|
||
YAML: function handleYamlDirective(state, name, args) {
|
||
|
||
var match, major, minor;
|
||
|
||
if (state.version !== null) {
|
||
throwError(state, 'duplication of %YAML directive');
|
||
}
|
||
|
||
if (args.length !== 1) {
|
||
throwError(state, 'YAML directive accepts exactly one argument');
|
||
}
|
||
|
||
match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]);
|
||
|
||
if (match === null) {
|
||
throwError(state, 'ill-formed argument of the YAML directive');
|
||
}
|
||
|
||
major = parseInt(match[1], 10);
|
||
minor = parseInt(match[2], 10);
|
||
|
||
if (major !== 1) {
|
||
throwError(state, 'unacceptable YAML version of the document');
|
||
}
|
||
|
||
state.version = args[0];
|
||
state.checkLineBreaks = (minor < 2);
|
||
|
||
if (minor !== 1 && minor !== 2) {
|
||
throwWarning(state, 'unsupported YAML version of the document');
|
||
}
|
||
},
|
||
|
||
TAG: function handleTagDirective(state, name, args) {
|
||
|
||
var handle, prefix;
|
||
|
||
if (args.length !== 2) {
|
||
throwError(state, 'TAG directive accepts exactly two arguments');
|
||
}
|
||
|
||
handle = args[0];
|
||
prefix = args[1];
|
||
|
||
if (!PATTERN_TAG_HANDLE.test(handle)) {
|
||
throwError(state, 'ill-formed tag handle (first argument) of the TAG directive');
|
||
}
|
||
|
||
if (_hasOwnProperty$1.call(state.tagMap, handle)) {
|
||
throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle');
|
||
}
|
||
|
||
if (!PATTERN_TAG_URI.test(prefix)) {
|
||
throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive');
|
||
}
|
||
|
||
try {
|
||
prefix = decodeURIComponent(prefix);
|
||
} catch (err) {
|
||
throwError(state, 'tag prefix is malformed: ' + prefix);
|
||
}
|
||
|
||
state.tagMap[handle] = prefix;
|
||
}
|
||
};
|
||
|
||
|
||
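// Append input.slice(start, end) to state.result, rejecting non-printable
// characters (or, when checkJson is set, anything outside tab/0x20-0x10FFFF).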
function captureSegment(state, start, end, checkJson) {
|
||
var _position, _length, _character, _result;
|
||
|
||
if (start < end) {
|
||
_result = state.input.slice(start, end);
|
||
|
||
if (checkJson) {
|
||
for (_position = 0, _length = _result.length; _position < _length; _position += 1) {
|
||
_character = _result.charCodeAt(_position);
|
||
if (!(_character === 0x09 ||
|
||
(0x20 <= _character && _character <= 0x10FFFF))) {
|
||
throwError(state, 'expected valid JSON character');
|
||
}
|
||
}
|
||
} else if (PATTERN_NON_PRINTABLE.test(_result)) {
|
||
throwError(state, 'the stream contains non-printable characters');
|
||
}
|
||
|
||
state.result += _result;
|
||
}
|
||
}
|
||
|
||
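// Merge the keys of a '<<' merge source into destination without overwriting
// keys that are already present; merged keys are recorded in overridableKeys
// so later explicit keys may still replace them.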
function mergeMappings(state, destination, source, overridableKeys) {
|
||
var sourceKeys, key, index, quantity;
|
||
|
||
if (!common.isObject(source)) {
|
||
throwError(state, 'cannot merge mappings; the provided source object is unacceptable');
|
||
}
|
||
|
||
sourceKeys = Object.keys(source);
|
||
|
||
for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) {
|
||
key = sourceKeys[index];
|
||
|
||
if (!_hasOwnProperty$1.call(destination, key)) {
|
||
destination[key] = source[key];
|
||
overridableKeys[key] = true;
|
||
}
|
||
}
|
||
}
|
||
|
||
function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode,
|
||
startLine, startLineStart, startPos) {
|
||
|
||
var index, quantity;
|
||
|
||
// The output is a plain object here, so keys can only be strings.
|
||
// We need to convert keyNode to a string, but doing so can hang the process
|
||
// (deeply nested arrays that explode exponentially using aliases).
|
||
if (Array.isArray(keyNode)) {
|
||
keyNode = Array.prototype.slice.call(keyNode);
|
||
|
||
for (index = 0, quantity = keyNode.length; index < quantity; index += 1) {
|
||
if (Array.isArray(keyNode[index])) {
|
||
throwError(state, 'nested arrays are not supported inside keys');
|
||
}
|
||
|
||
if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') {
|
||
keyNode[index] = '[object Object]';
|
||
}
|
||
}
|
||
}
|
||
|
||
// Avoid code execution in load() via toString property
|
||
// (still use its own toString for arrays, timestamps,
|
||
// and whatever user schema extensions happen to have @@toStringTag)
|
||
if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') {
|
||
keyNode = '[object Object]';
|
||
}
|
||
|
||
|
||
keyNode = String(keyNode);
|
||
|
||
if (_result === null) {
|
||
_result = {};
|
||
}
|
||
|
||
if (keyTag === 'tag:yaml.org,2002:merge') {
|
||
if (Array.isArray(valueNode)) {
|
||
for (index = 0, quantity = valueNode.length; index < quantity; index += 1) {
|
||
mergeMappings(state, _result, valueNode[index], overridableKeys);
|
||
}
|
||
} else {
|
||
mergeMappings(state, _result, valueNode, overridableKeys);
|
||
}
|
||
} else {
|
||
if (!state.json &&
|
||
!_hasOwnProperty$1.call(overridableKeys, keyNode) &&
|
||
_hasOwnProperty$1.call(_result, keyNode)) {
|
||
state.line = startLine || state.line;
|
||
state.lineStart = startLineStart || state.lineStart;
|
||
state.position = startPos || state.position;
|
||
throwError(state, 'duplicated mapping key');
|
||
}
|
||
|
||
// used for this specific key only because Object.defineProperty is slow
|
||
if (keyNode === '__proto__') {
|
||
Object.defineProperty(_result, keyNode, {
|
||
configurable: true,
|
||
enumerable: true,
|
||
writable: true,
|
||
value: valueNode
|
||
});
|
||
} else {
|
||
_result[keyNode] = valueNode;
|
||
}
|
||
delete overridableKeys[keyNode];
|
||
}
|
||
|
||
return _result;
|
||
}
|
||
|
||
function readLineBreak(state) {
|
||
var ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch === 0x0A/* LF */) {
|
||
state.position++;
|
||
} else if (ch === 0x0D/* CR */) {
|
||
state.position++;
|
||
if (state.input.charCodeAt(state.position) === 0x0A/* LF */) {
|
||
state.position++;
|
||
}
|
||
} else {
|
||
throwError(state, 'a line break is expected');
|
||
}
|
||
|
||
state.line += 1;
|
||
state.lineStart = state.position;
|
||
state.firstTabInLine = -1;
|
||
}
|
||
|
||
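// Skip spaces/tabs, optional '#' comments and line breaks; returns the number
// of line breaks consumed and leaves state.lineIndent at the indentation of
// the last line read.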
function skipSeparationSpace(state, allowComments, checkIndent) {
|
||
var lineBreaks = 0,
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
while (ch !== 0) {
|
||
while (is_WHITE_SPACE(ch)) {
|
||
if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) {
|
||
state.firstTabInLine = state.position;
|
||
}
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
if (allowComments && ch === 0x23/* # */) {
|
||
do {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
} while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0);
|
||
}
|
||
|
||
if (is_EOL(ch)) {
|
||
readLineBreak(state);
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
lineBreaks++;
|
||
state.lineIndent = 0;
|
||
|
||
while (ch === 0x20/* Space */) {
|
||
state.lineIndent++;
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) {
|
||
throwWarning(state, 'deficient indentation');
|
||
}
|
||
|
||
return lineBreaks;
|
||
}
|
||
|
||
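// True if the current line starts with a document separator ('---' or '...')
// followed by whitespace or the end of the stream.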
function testDocumentSeparator(state) {
|
||
var _position = state.position,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(_position);
|
||
|
||
// Condition state.position === state.lineStart is tested
|
||
// in parent on each call, for efficiency. No need to test it here again.
|
||
if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) &&
|
||
ch === state.input.charCodeAt(_position + 1) &&
|
||
ch === state.input.charCodeAt(_position + 2)) {
|
||
|
||
_position += 3;
|
||
|
||
ch = state.input.charCodeAt(_position);
|
||
|
||
if (ch === 0 || is_WS_OR_EOL(ch)) {
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
function writeFoldedLines(state, count) {
|
||
if (count === 1) {
|
||
state.result += ' ';
|
||
} else if (count > 1) {
|
||
state.result += common.repeat('\n', count - 1);
|
||
}
|
||
}
|
||
|
||
|
||
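// Try to read a plain (unquoted) scalar at the current position; returns
// false and restores state.kind/state.result if nothing was captured.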
function readPlainScalar(state, nodeIndent, withinFlowCollection) {
|
||
var preceding,
|
||
following,
|
||
captureStart,
|
||
captureEnd,
|
||
hasPendingContent,
|
||
_line,
|
||
_lineStart,
|
||
_lineIndent,
|
||
_kind = state.kind,
|
||
_result = state.result,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (is_WS_OR_EOL(ch) ||
|
||
is_FLOW_INDICATOR(ch) ||
|
||
ch === 0x23/* # */ ||
|
||
ch === 0x26/* & */ ||
|
||
ch === 0x2A/* * */ ||
|
||
ch === 0x21/* ! */ ||
|
||
ch === 0x7C/* | */ ||
|
||
ch === 0x3E/* > */ ||
|
||
ch === 0x27/* ' */ ||
|
||
ch === 0x22/* " */ ||
|
||
ch === 0x25/* % */ ||
|
||
ch === 0x40/* @ */ ||
|
||
ch === 0x60/* ` */) {
|
||
return false;
|
||
}
|
||
|
||
if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) {
|
||
following = state.input.charCodeAt(state.position + 1);
|
||
|
||
if (is_WS_OR_EOL(following) ||
|
||
withinFlowCollection && is_FLOW_INDICATOR(following)) {
|
||
return false;
|
||
}
|
||
}
|
||
|
||
state.kind = 'scalar';
|
||
state.result = '';
|
||
captureStart = captureEnd = state.position;
|
||
hasPendingContent = false;
|
||
|
||
while (ch !== 0) {
|
||
if (ch === 0x3A/* : */) {
|
||
following = state.input.charCodeAt(state.position + 1);
|
||
|
||
if (is_WS_OR_EOL(following) ||
|
||
withinFlowCollection && is_FLOW_INDICATOR(following)) {
|
||
break;
|
||
}
|
||
|
||
} else if (ch === 0x23/* # */) {
|
||
preceding = state.input.charCodeAt(state.position - 1);
|
||
|
||
if (is_WS_OR_EOL(preceding)) {
|
||
break;
|
||
}
|
||
|
||
} else if ((state.position === state.lineStart && testDocumentSeparator(state)) ||
|
||
withinFlowCollection && is_FLOW_INDICATOR(ch)) {
|
||
break;
|
||
|
||
} else if (is_EOL(ch)) {
|
||
_line = state.line;
|
||
_lineStart = state.lineStart;
|
||
_lineIndent = state.lineIndent;
|
||
skipSeparationSpace(state, false, -1);
|
||
|
||
if (state.lineIndent >= nodeIndent) {
|
||
hasPendingContent = true;
|
||
ch = state.input.charCodeAt(state.position);
|
||
continue;
|
||
} else {
|
||
state.position = captureEnd;
|
||
state.line = _line;
|
||
state.lineStart = _lineStart;
|
||
state.lineIndent = _lineIndent;
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (hasPendingContent) {
|
||
captureSegment(state, captureStart, captureEnd, false);
|
||
writeFoldedLines(state, state.line - _line);
|
||
captureStart = captureEnd = state.position;
|
||
hasPendingContent = false;
|
||
}
|
||
|
||
if (!is_WHITE_SPACE(ch)) {
|
||
captureEnd = state.position + 1;
|
||
}
|
||
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
captureSegment(state, captureStart, captureEnd, false);
|
||
|
||
if (state.result) {
|
||
return true;
|
||
}
|
||
|
||
state.kind = _kind;
|
||
state.result = _result;
|
||
return false;
|
||
}
|
||
|
||
function readSingleQuotedScalar(state, nodeIndent) {
|
||
var ch,
|
||
captureStart, captureEnd;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch !== 0x27/* ' */) {
|
||
return false;
|
||
}
|
||
|
||
state.kind = 'scalar';
|
||
state.result = '';
|
||
state.position++;
|
||
captureStart = captureEnd = state.position;
|
||
|
||
while ((ch = state.input.charCodeAt(state.position)) !== 0) {
|
||
if (ch === 0x27/* ' */) {
|
||
captureSegment(state, captureStart, state.position, true);
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
if (ch === 0x27/* ' */) {
|
||
captureStart = state.position;
|
||
state.position++;
|
||
captureEnd = state.position;
|
||
} else {
|
||
return true;
|
||
}
|
||
|
||
} else if (is_EOL(ch)) {
|
||
captureSegment(state, captureStart, captureEnd, true);
|
||
writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
|
||
captureStart = captureEnd = state.position;
|
||
|
||
} else if (state.position === state.lineStart && testDocumentSeparator(state)) {
|
||
throwError(state, 'unexpected end of the document within a single quoted scalar');
|
||
|
||
} else {
|
||
state.position++;
|
||
captureEnd = state.position;
|
||
}
|
||
}
|
||
|
||
throwError(state, 'unexpected end of the stream within a single quoted scalar');
|
||
}
|
||
|
||
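// Read a double-quoted scalar, expanding \x.., \u...., \U........ escapes
// and the single-character escapes from simpleEscapeMap.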
function readDoubleQuotedScalar(state, nodeIndent) {
|
||
var captureStart,
|
||
captureEnd,
|
||
hexLength,
|
||
hexResult,
|
||
tmp,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch !== 0x22/* " */) {
|
||
return false;
|
||
}
|
||
|
||
state.kind = 'scalar';
|
||
state.result = '';
|
||
state.position++;
|
||
captureStart = captureEnd = state.position;
|
||
|
||
while ((ch = state.input.charCodeAt(state.position)) !== 0) {
|
||
if (ch === 0x22/* " */) {
|
||
captureSegment(state, captureStart, state.position, true);
|
||
state.position++;
|
||
return true;
|
||
|
||
} else if (ch === 0x5C/* \ */) {
|
||
captureSegment(state, captureStart, state.position, true);
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
if (is_EOL(ch)) {
|
||
skipSeparationSpace(state, false, nodeIndent);
|
||
|
||
// TODO: rework to inline fn with no type cast?
|
||
} else if (ch < 256 && simpleEscapeCheck[ch]) {
|
||
state.result += simpleEscapeMap[ch];
|
||
state.position++;
|
||
|
||
} else if ((tmp = escapedHexLen(ch)) > 0) {
|
||
hexLength = tmp;
|
||
hexResult = 0;
|
||
|
||
for (; hexLength > 0; hexLength--) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
if ((tmp = fromHexCode(ch)) >= 0) {
|
||
hexResult = (hexResult << 4) + tmp;
|
||
|
||
} else {
|
||
throwError(state, 'expected hexadecimal character');
|
||
}
|
||
}
|
||
|
||
state.result += charFromCodepoint(hexResult);
|
||
|
||
state.position++;
|
||
|
||
} else {
|
||
throwError(state, 'unknown escape sequence');
|
||
}
|
||
|
||
captureStart = captureEnd = state.position;
|
||
|
||
} else if (is_EOL(ch)) {
|
||
captureSegment(state, captureStart, captureEnd, true);
|
||
writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
|
||
captureStart = captureEnd = state.position;
|
||
|
||
} else if (state.position === state.lineStart && testDocumentSeparator(state)) {
|
||
throwError(state, 'unexpected end of the document within a double quoted scalar');
|
||
|
||
} else {
|
||
state.position++;
|
||
captureEnd = state.position;
|
||
}
|
||
}
|
||
|
||
throwError(state, 'unexpected end of the stream within a double quoted scalar');
|
||
}
|
||
|
||
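// Read a flow sequence [...] or flow mapping {...}, including '?' explicit
// pairs and single-pair mappings used as sequence entries.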
function readFlowCollection(state, nodeIndent) {
|
||
var readNext = true,
|
||
_line,
|
||
_lineStart,
|
||
_pos,
|
||
_tag = state.tag,
|
||
_result,
|
||
_anchor = state.anchor,
|
||
following,
|
||
terminator,
|
||
isPair,
|
||
isExplicitPair,
|
||
isMapping,
|
||
overridableKeys = Object.create(null),
|
||
keyNode,
|
||
keyTag,
|
||
valueNode,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch === 0x5B/* [ */) {
|
||
terminator = 0x5D;/* ] */
|
||
isMapping = false;
|
||
_result = [];
|
||
} else if (ch === 0x7B/* { */) {
|
||
terminator = 0x7D;/* } */
|
||
isMapping = true;
|
||
_result = {};
|
||
} else {
|
||
return false;
|
||
}
|
||
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = _result;
|
||
}
|
||
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
while (ch !== 0) {
|
||
skipSeparationSpace(state, true, nodeIndent);
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch === terminator) {
|
||
state.position++;
|
||
state.tag = _tag;
|
||
state.anchor = _anchor;
|
||
state.kind = isMapping ? 'mapping' : 'sequence';
|
||
state.result = _result;
|
||
return true;
|
||
} else if (!readNext) {
|
||
throwError(state, 'missed comma between flow collection entries');
|
||
} else if (ch === 0x2C/* , */) {
|
||
// "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4
|
||
throwError(state, "expected the node content, but found ','");
|
||
}
|
||
|
||
keyTag = keyNode = valueNode = null;
|
||
isPair = isExplicitPair = false;
|
||
|
||
if (ch === 0x3F/* ? */) {
|
||
following = state.input.charCodeAt(state.position + 1);
|
||
|
||
if (is_WS_OR_EOL(following)) {
|
||
isPair = isExplicitPair = true;
|
||
state.position++;
|
||
skipSeparationSpace(state, true, nodeIndent);
|
||
}
|
||
}
|
||
|
||
_line = state.line; // Save the current line.
|
||
_lineStart = state.lineStart;
|
||
_pos = state.position;
|
||
composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
|
||
keyTag = state.tag;
|
||
keyNode = state.result;
|
||
skipSeparationSpace(state, true, nodeIndent);
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) {
|
||
isPair = true;
|
||
ch = state.input.charCodeAt(++state.position);
|
||
skipSeparationSpace(state, true, nodeIndent);
|
||
composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
|
||
valueNode = state.result;
|
||
}
|
||
|
||
if (isMapping) {
|
||
storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos);
|
||
} else if (isPair) {
|
||
_result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos));
|
||
} else {
|
||
_result.push(keyNode);
|
||
}
|
||
|
||
skipSeparationSpace(state, true, nodeIndent);
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch === 0x2C/* , */) {
|
||
readNext = true;
|
||
ch = state.input.charCodeAt(++state.position);
|
||
} else {
|
||
readNext = false;
|
||
}
|
||
}
|
||
|
||
throwError(state, 'unexpected end of the stream within a flow collection');
|
||
}
|
||
|
||
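// Read a literal (|) or folded (>) block scalar, honouring the optional
// explicit indentation indicator and the '+'/'-' chomping indicators.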
function readBlockScalar(state, nodeIndent) {
|
||
var captureStart,
|
||
folding,
|
||
chomping = CHOMPING_CLIP,
|
||
didReadContent = false,
|
||
detectedIndent = false,
|
||
textIndent = nodeIndent,
|
||
emptyLines = 0,
|
||
atMoreIndented = false,
|
||
tmp,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch === 0x7C/* | */) {
|
||
folding = false;
|
||
} else if (ch === 0x3E/* > */) {
|
||
folding = true;
|
||
} else {
|
||
return false;
|
||
}
|
||
|
||
state.kind = 'scalar';
|
||
state.result = '';
|
||
|
||
while (ch !== 0) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
if (ch === 0x2B/* + */ || ch === 0x2D/* - */) {
|
||
if (CHOMPING_CLIP === chomping) {
|
||
chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP;
|
||
} else {
|
||
throwError(state, 'repeat of a chomping mode identifier');
|
||
}
|
||
|
||
} else if ((tmp = fromDecimalCode(ch)) >= 0) {
|
||
if (tmp === 0) {
|
||
throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one');
|
||
} else if (!detectedIndent) {
|
||
textIndent = nodeIndent + tmp - 1;
|
||
detectedIndent = true;
|
||
} else {
|
||
throwError(state, 'repeat of an indentation width identifier');
|
||
}
|
||
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (is_WHITE_SPACE(ch)) {
|
||
do { ch = state.input.charCodeAt(++state.position); }
|
||
while (is_WHITE_SPACE(ch));
|
||
|
||
if (ch === 0x23/* # */) {
|
||
do { ch = state.input.charCodeAt(++state.position); }
|
||
while (!is_EOL(ch) && (ch !== 0));
|
||
}
|
||
}
|
||
|
||
while (ch !== 0) {
|
||
readLineBreak(state);
|
||
state.lineIndent = 0;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
while ((!detectedIndent || state.lineIndent < textIndent) &&
|
||
(ch === 0x20/* Space */)) {
|
||
state.lineIndent++;
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
if (!detectedIndent && state.lineIndent > textIndent) {
|
||
textIndent = state.lineIndent;
|
||
}
|
||
|
||
if (is_EOL(ch)) {
|
||
emptyLines++;
|
||
continue;
|
||
}
|
||
|
||
// End of the scalar.
|
||
if (state.lineIndent < textIndent) {
|
||
|
||
// Perform the chomping.
|
||
if (chomping === CHOMPING_KEEP) {
|
||
state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
|
||
} else if (chomping === CHOMPING_CLIP) {
|
||
if (didReadContent) { // i.e. only if the scalar is not empty.
|
||
state.result += '\n';
|
||
}
|
||
}
|
||
|
||
// Break this `while` cycle and go to the function's epilogue.
|
||
break;
|
||
}
|
||
|
||
// Folded style: use fancy rules to handle line breaks.
|
||
if (folding) {
|
||
|
||
// Lines starting with white space characters (more-indented lines) are not folded.
|
||
if (is_WHITE_SPACE(ch)) {
|
||
atMoreIndented = true;
|
||
// except for the first content line (cf. Example 8.1)
|
||
state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
|
||
|
||
// End of more-indented block.
|
||
} else if (atMoreIndented) {
|
||
atMoreIndented = false;
|
||
state.result += common.repeat('\n', emptyLines + 1);
|
||
|
||
// Just one line break - perceive as the same line.
|
||
} else if (emptyLines === 0) {
|
||
if (didReadContent) { // i.e. only if we have already read some scalar content.
|
||
state.result += ' ';
|
||
}
|
||
|
||
// Several line breaks - perceive as different lines.
|
||
} else {
|
||
state.result += common.repeat('\n', emptyLines);
|
||
}
|
||
|
||
// Literal style: just add exact number of line breaks between content lines.
|
||
} else {
|
||
// Keep all line breaks except the header line break.
|
||
state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
|
||
}
|
||
|
||
didReadContent = true;
|
||
detectedIndent = true;
|
||
emptyLines = 0;
|
||
captureStart = state.position;
|
||
|
||
while (!is_EOL(ch) && (ch !== 0)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
captureSegment(state, captureStart, state.position, false);
|
||
}
|
||
|
||
return true;
|
||
}
|
||
|
||
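// Read a block sequence: '-' entries at the current indentation level.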
function readBlockSequence(state, nodeIndent) {
|
||
var _line,
|
||
_tag = state.tag,
|
||
_anchor = state.anchor,
|
||
_result = [],
|
||
following,
|
||
detected = false,
|
||
ch;
|
||
|
||
// there is a leading tab before this token, so it can't be a block sequence/mapping;
|
||
// it can still be flow sequence/mapping or a scalar
|
||
if (state.firstTabInLine !== -1) return false;
|
||
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = _result;
|
||
}
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
while (ch !== 0) {
|
||
if (state.firstTabInLine !== -1) {
|
||
state.position = state.firstTabInLine;
|
||
throwError(state, 'tab characters must not be used in indentation');
|
||
}
|
||
|
||
if (ch !== 0x2D/* - */) {
|
||
break;
|
||
}
|
||
|
||
following = state.input.charCodeAt(state.position + 1);
|
||
|
||
if (!is_WS_OR_EOL(following)) {
|
||
break;
|
||
}
|
||
|
||
detected = true;
|
||
state.position++;
|
||
|
||
if (skipSeparationSpace(state, true, -1)) {
|
||
if (state.lineIndent <= nodeIndent) {
|
||
_result.push(null);
|
||
ch = state.input.charCodeAt(state.position);
|
||
continue;
|
||
}
|
||
}
|
||
|
||
_line = state.line;
|
||
composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true);
|
||
_result.push(state.result);
|
||
skipSeparationSpace(state, true, -1);
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {
|
||
throwError(state, 'bad indentation of a sequence entry');
|
||
} else if (state.lineIndent < nodeIndent) {
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (detected) {
|
||
state.tag = _tag;
|
||
state.anchor = _anchor;
|
||
state.kind = 'sequence';
|
||
state.result = _result;
|
||
return true;
|
||
}
|
||
return false;
|
||
}
|
||
|
||
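// Read a block mapping, handling both explicit ('? key' / ': value') and
// implicit ('key: value') notation.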
function readBlockMapping(state, nodeIndent, flowIndent) {
|
||
var following,
|
||
allowCompact,
|
||
_line,
|
||
_keyLine,
|
||
_keyLineStart,
|
||
_keyPos,
|
||
_tag = state.tag,
|
||
_anchor = state.anchor,
|
||
_result = {},
|
||
overridableKeys = Object.create(null),
|
||
keyTag = null,
|
||
keyNode = null,
|
||
valueNode = null,
|
||
atExplicitKey = false,
|
||
detected = false,
|
||
ch;
|
||
|
||
// there is a leading tab before this token, so it can't be a block sequence/mapping;
|
||
// it can still be flow sequence/mapping or a scalar
|
||
if (state.firstTabInLine !== -1) return false;
|
||
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = _result;
|
||
}
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
while (ch !== 0) {
|
||
if (!atExplicitKey && state.firstTabInLine !== -1) {
|
||
state.position = state.firstTabInLine;
|
||
throwError(state, 'tab characters must not be used in indentation');
|
||
}
|
||
|
||
following = state.input.charCodeAt(state.position + 1);
|
||
_line = state.line; // Save the current line.
|
||
|
||
//
|
||
// Explicit notation case. There are two separate blocks:
|
||
// first for the key (denoted by "?") and second for the value (denoted by ":")
|
||
//
|
||
if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) {
|
||
|
||
if (ch === 0x3F/* ? */) {
|
||
if (atExplicitKey) {
|
||
storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);
|
||
keyTag = keyNode = valueNode = null;
|
||
}
|
||
|
||
detected = true;
|
||
atExplicitKey = true;
|
||
allowCompact = true;
|
||
|
||
} else if (atExplicitKey) {
|
||
// i.e. 0x3A/* : */ === character after the explicit key.
|
||
atExplicitKey = false;
|
||
allowCompact = true;
|
||
|
||
} else {
|
||
throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line');
|
||
}
|
||
|
||
state.position += 1;
|
||
ch = following;
|
||
|
||
//
|
||
// Implicit notation case. Flow-style node as the key first, then ":", and the value.
|
||
//
|
||
} else {
|
||
_keyLine = state.line;
|
||
_keyLineStart = state.lineStart;
|
||
_keyPos = state.position;
|
||
|
||
if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) {
|
||
// Neither implicit nor explicit notation.
|
||
// Reading is done. Go to the epilogue.
|
||
break;
|
||
}
|
||
|
||
if (state.line === _line) {
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
while (is_WHITE_SPACE(ch)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
if (ch === 0x3A/* : */) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
if (!is_WS_OR_EOL(ch)) {
|
||
throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping');
|
||
}
|
||
|
||
if (atExplicitKey) {
|
||
storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);
|
||
keyTag = keyNode = valueNode = null;
|
||
}
|
||
|
||
detected = true;
|
||
atExplicitKey = false;
|
||
allowCompact = false;
|
||
keyTag = state.tag;
|
||
keyNode = state.result;
|
||
|
||
} else if (detected) {
|
||
throwError(state, 'can not read an implicit mapping pair; a colon is missed');
|
||
|
||
} else {
|
||
state.tag = _tag;
|
||
state.anchor = _anchor;
|
||
return true; // Keep the result of `composeNode`.
|
||
}
|
||
|
||
} else if (detected) {
|
||
throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key');
|
||
|
||
} else {
|
||
state.tag = _tag;
|
||
state.anchor = _anchor;
|
||
return true; // Keep the result of `composeNode`.
|
||
}
|
||
}
|
||
|
||
//
|
||
// Common reading code for both explicit and implicit notations.
|
||
//
|
||
if (state.line === _line || state.lineIndent > nodeIndent) {
|
||
if (atExplicitKey) {
|
||
_keyLine = state.line;
|
||
_keyLineStart = state.lineStart;
|
||
_keyPos = state.position;
|
||
}
|
||
|
||
if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) {
|
||
if (atExplicitKey) {
|
||
keyNode = state.result;
|
||
} else {
|
||
valueNode = state.result;
|
||
}
|
||
}
|
||
|
||
if (!atExplicitKey) {
|
||
storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos);
|
||
keyTag = keyNode = valueNode = null;
|
||
}
|
||
|
||
skipSeparationSpace(state, true, -1);
|
||
ch = state.input.charCodeAt(state.position);
|
||
}
|
||
|
||
if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {
|
||
throwError(state, 'bad indentation of a mapping entry');
|
||
} else if (state.lineIndent < nodeIndent) {
|
||
break;
|
||
}
|
||
}
|
||
|
||
//
|
||
// Epilogue.
|
||
//
|
||
|
||
// Special case: last mapping's node contains only the key in explicit notation.
|
||
if (atExplicitKey) {
|
||
storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);
|
||
}
|
||
|
||
// Expose the resulting mapping.
|
||
if (detected) {
|
||
state.tag = _tag;
|
||
state.anchor = _anchor;
|
||
state.kind = 'mapping';
|
||
state.result = _result;
|
||
}
|
||
|
||
return detected;
|
||
}
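
// Illustrative sketch of the two notations handled above, explicit "?"/":" keys
// and implicit "key: value" pairs (using the `load` API defined later on):
/*
  load('? long key\n: its value\nshort: pair\n');
  // => { 'long key': 'its value', short: 'pair' }
*/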
function readTagProperty(state) {
|
||
var _position,
|
||
isVerbatim = false,
|
||
isNamed = false,
|
||
tagHandle,
|
||
tagName,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch !== 0x21/* ! */) return false;
|
||
|
||
if (state.tag !== null) {
|
||
throwError(state, 'duplication of a tag property');
|
||
}
|
||
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
if (ch === 0x3C/* < */) {
|
||
isVerbatim = true;
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
} else if (ch === 0x21/* ! */) {
|
||
isNamed = true;
|
||
tagHandle = '!!';
|
||
ch = state.input.charCodeAt(++state.position);
|
||
|
||
} else {
|
||
tagHandle = '!';
|
||
}
|
||
|
||
_position = state.position;
|
||
|
||
if (isVerbatim) {
|
||
do { ch = state.input.charCodeAt(++state.position); }
|
||
while (ch !== 0 && ch !== 0x3E/* > */);
|
||
|
||
if (state.position < state.length) {
|
||
tagName = state.input.slice(_position, state.position);
|
||
ch = state.input.charCodeAt(++state.position);
|
||
} else {
|
||
throwError(state, 'unexpected end of the stream within a verbatim tag');
|
||
}
|
||
} else {
|
||
while (ch !== 0 && !is_WS_OR_EOL(ch)) {
|
||
|
||
if (ch === 0x21/* ! */) {
|
||
if (!isNamed) {
|
||
tagHandle = state.input.slice(_position - 1, state.position + 1);
|
||
|
||
if (!PATTERN_TAG_HANDLE.test(tagHandle)) {
|
||
throwError(state, 'named tag handle cannot contain such characters');
|
||
}
|
||
|
||
isNamed = true;
|
||
_position = state.position + 1;
|
||
} else {
|
||
throwError(state, 'tag suffix cannot contain exclamation marks');
|
||
}
|
||
}
|
||
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
tagName = state.input.slice(_position, state.position);
|
||
|
||
if (PATTERN_FLOW_INDICATORS.test(tagName)) {
|
||
throwError(state, 'tag suffix cannot contain flow indicator characters');
|
||
}
|
||
}
|
||
|
||
if (tagName && !PATTERN_TAG_URI.test(tagName)) {
|
||
throwError(state, 'tag name cannot contain such characters: ' + tagName);
|
||
}
|
||
|
||
try {
|
||
tagName = decodeURIComponent(tagName);
|
||
} catch (err) {
|
||
throwError(state, 'tag name is malformed: ' + tagName);
|
||
}
|
||
|
||
if (isVerbatim) {
|
||
state.tag = tagName;
|
||
|
||
} else if (_hasOwnProperty$1.call(state.tagMap, tagHandle)) {
|
||
state.tag = state.tagMap[tagHandle] + tagName;
|
||
|
||
} else if (tagHandle === '!') {
|
||
state.tag = '!' + tagName;
|
||
|
||
} else if (tagHandle === '!!') {
|
||
state.tag = 'tag:yaml.org,2002:' + tagName;
|
||
|
||
} else {
|
||
throwError(state, 'undeclared tag handle "' + tagHandle + '"');
|
||
}
|
||
|
||
return true;
|
||
}
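
// Illustrative sketch of how the tag handles above resolve:
/*
  '!!str foo'                      // '!!' handle  -> state.tag 'tag:yaml.org,2002:str'
  '!local foo'                     // '!' handle   -> state.tag '!local'
  '!<tag:example.com,2014:x> foo'  // verbatim tag -> state.tag 'tag:example.com,2014:x'
*/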
function readAnchorProperty(state) {
|
||
var _position,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch !== 0x26/* & */) return false;
|
||
|
||
if (state.anchor !== null) {
|
||
throwError(state, 'duplication of an anchor property');
|
||
}
|
||
|
||
ch = state.input.charCodeAt(++state.position);
|
||
_position = state.position;
|
||
|
||
while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
if (state.position === _position) {
|
||
throwError(state, 'name of an anchor node must contain at least one character');
|
||
}
|
||
|
||
state.anchor = state.input.slice(_position, state.position);
|
||
return true;
|
||
}
|
||
|
||
function readAlias(state) {
|
||
var _position, alias,
|
||
ch;
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (ch !== 0x2A/* * */) return false;
|
||
|
||
ch = state.input.charCodeAt(++state.position);
|
||
_position = state.position;
|
||
|
||
while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
if (state.position === _position) {
|
||
throwError(state, 'name of an alias node must contain at least one character');
|
||
}
|
||
|
||
alias = state.input.slice(_position, state.position);
|
||
|
||
if (!_hasOwnProperty$1.call(state.anchorMap, alias)) {
|
||
throwError(state, 'unidentified alias "' + alias + '"');
|
||
}
|
||
|
||
state.result = state.anchorMap[alias];
|
||
skipSeparationSpace(state, true, -1);
|
||
return true;
|
||
}
|
||
|
||
function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) {
|
||
var allowBlockStyles,
|
||
allowBlockScalars,
|
||
allowBlockCollections,
|
||
indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this<parent
|
||
atNewLine = false,
|
||
hasContent = false,
|
||
typeIndex,
|
||
typeQuantity,
|
||
typeList,
|
||
type,
|
||
flowIndent,
|
||
blockIndent;
|
||
|
||
if (state.listener !== null) {
|
||
state.listener('open', state);
|
||
}
|
||
|
||
state.tag = null;
|
||
state.anchor = null;
|
||
state.kind = null;
|
||
state.result = null;
|
||
|
||
allowBlockStyles = allowBlockScalars = allowBlockCollections =
|
||
CONTEXT_BLOCK_OUT === nodeContext ||
|
||
CONTEXT_BLOCK_IN === nodeContext;
|
||
|
||
if (allowToSeek) {
|
||
if (skipSeparationSpace(state, true, -1)) {
|
||
atNewLine = true;
|
||
|
||
if (state.lineIndent > parentIndent) {
|
||
indentStatus = 1;
|
||
} else if (state.lineIndent === parentIndent) {
|
||
indentStatus = 0;
|
||
} else if (state.lineIndent < parentIndent) {
|
||
indentStatus = -1;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (indentStatus === 1) {
|
||
while (readTagProperty(state) || readAnchorProperty(state)) {
|
||
if (skipSeparationSpace(state, true, -1)) {
|
||
atNewLine = true;
|
||
allowBlockCollections = allowBlockStyles;
|
||
|
||
if (state.lineIndent > parentIndent) {
|
||
indentStatus = 1;
|
||
} else if (state.lineIndent === parentIndent) {
|
||
indentStatus = 0;
|
||
} else if (state.lineIndent < parentIndent) {
|
||
indentStatus = -1;
|
||
}
|
||
} else {
|
||
allowBlockCollections = false;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (allowBlockCollections) {
|
||
allowBlockCollections = atNewLine || allowCompact;
|
||
}
|
||
|
||
if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) {
|
||
if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) {
|
||
flowIndent = parentIndent;
|
||
} else {
|
||
flowIndent = parentIndent + 1;
|
||
}
|
||
|
||
blockIndent = state.position - state.lineStart;
|
||
|
||
if (indentStatus === 1) {
|
||
if (allowBlockCollections &&
|
||
(readBlockSequence(state, blockIndent) ||
|
||
readBlockMapping(state, blockIndent, flowIndent)) ||
|
||
readFlowCollection(state, flowIndent)) {
|
||
hasContent = true;
|
||
} else {
|
||
if ((allowBlockScalars && readBlockScalar(state, flowIndent)) ||
|
||
readSingleQuotedScalar(state, flowIndent) ||
|
||
readDoubleQuotedScalar(state, flowIndent)) {
|
||
hasContent = true;
|
||
|
||
} else if (readAlias(state)) {
|
||
hasContent = true;
|
||
|
||
if (state.tag !== null || state.anchor !== null) {
|
||
throwError(state, 'alias node should not have any properties');
|
||
}
|
||
|
||
} else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) {
|
||
hasContent = true;
|
||
|
||
if (state.tag === null) {
|
||
state.tag = '?';
|
||
}
|
||
}
|
||
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = state.result;
|
||
}
|
||
}
|
||
} else if (indentStatus === 0) {
|
||
// Special case: block sequences are allowed to have same indentation level as the parent.
|
||
// http://www.yaml.org/spec/1.2/spec.html#id2799784
|
||
hasContent = allowBlockCollections && readBlockSequence(state, blockIndent);
|
||
}
|
||
}
|
||
|
||
if (state.tag === null) {
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = state.result;
|
||
}
|
||
|
||
} else if (state.tag === '?') {
|
||
// Implicit resolving is not allowed for non-scalar types, and '?'
|
||
// non-specific tag is only automatically assigned to plain scalars.
|
||
//
|
||
// We only need to check kind conformity in case user explicitly assigns '?'
|
||
// tag, for example like this: "!<?> [0]"
|
||
//
|
||
if (state.result !== null && state.kind !== 'scalar') {
|
||
throwError(state, 'unacceptable node kind for !<?> tag; it should be "scalar", not "' + state.kind + '"');
|
||
}
|
||
|
||
for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) {
|
||
type = state.implicitTypes[typeIndex];
|
||
|
||
if (type.resolve(state.result)) { // `state.result` updated in resolver if matched
|
||
state.result = type.construct(state.result);
|
||
state.tag = type.tag;
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = state.result;
|
||
}
|
||
break;
|
||
}
|
||
}
|
||
} else if (state.tag !== '!') {
|
||
if (_hasOwnProperty$1.call(state.typeMap[state.kind || 'fallback'], state.tag)) {
|
||
type = state.typeMap[state.kind || 'fallback'][state.tag];
|
||
} else {
|
||
// looking for multi type
|
||
type = null;
|
||
typeList = state.typeMap.multi[state.kind || 'fallback'];
|
||
|
||
for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) {
|
||
if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) {
|
||
type = typeList[typeIndex];
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (!type) {
|
||
throwError(state, 'unknown tag !<' + state.tag + '>');
|
||
}
|
||
|
||
if (state.result !== null && type.kind !== state.kind) {
|
||
throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"');
|
||
}
|
||
|
||
if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched
|
||
throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag');
|
||
} else {
|
||
state.result = type.construct(state.result, state.tag);
|
||
if (state.anchor !== null) {
|
||
state.anchorMap[state.anchor] = state.result;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (state.listener !== null) {
|
||
state.listener('close', state);
|
||
}
|
||
return state.tag !== null || state.anchor !== null || hasContent;
|
||
}
|
||
|
||
function readDocument(state) {
|
||
var documentStart = state.position,
|
||
_position,
|
||
directiveName,
|
||
directiveArgs,
|
||
hasDirectives = false,
|
||
ch;
|
||
|
||
state.version = null;
|
||
state.checkLineBreaks = state.legacy;
|
||
state.tagMap = Object.create(null);
|
||
state.anchorMap = Object.create(null);
|
||
|
||
while ((ch = state.input.charCodeAt(state.position)) !== 0) {
|
||
skipSeparationSpace(state, true, -1);
|
||
|
||
ch = state.input.charCodeAt(state.position);
|
||
|
||
if (state.lineIndent > 0 || ch !== 0x25/* % */) {
|
||
break;
|
||
}
|
||
|
||
hasDirectives = true;
|
||
ch = state.input.charCodeAt(++state.position);
|
||
_position = state.position;
|
||
|
||
while (ch !== 0 && !is_WS_OR_EOL(ch)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
directiveName = state.input.slice(_position, state.position);
|
||
directiveArgs = [];
|
||
|
||
if (directiveName.length < 1) {
|
||
throwError(state, 'directive name must not be less than one character in length');
|
||
}
|
||
|
||
while (ch !== 0) {
|
||
while (is_WHITE_SPACE(ch)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
if (ch === 0x23/* # */) {
|
||
do { ch = state.input.charCodeAt(++state.position); }
|
||
while (ch !== 0 && !is_EOL(ch));
|
||
break;
|
||
}
|
||
|
||
if (is_EOL(ch)) break;
|
||
|
||
_position = state.position;
|
||
|
||
while (ch !== 0 && !is_WS_OR_EOL(ch)) {
|
||
ch = state.input.charCodeAt(++state.position);
|
||
}
|
||
|
||
directiveArgs.push(state.input.slice(_position, state.position));
|
||
}
|
||
|
||
if (ch !== 0) readLineBreak(state);
|
||
|
||
if (_hasOwnProperty$1.call(directiveHandlers, directiveName)) {
|
||
directiveHandlers[directiveName](state, directiveName, directiveArgs);
|
||
} else {
|
||
throwWarning(state, 'unknown document directive "' + directiveName + '"');
|
||
}
|
||
}
|
||
|
||
skipSeparationSpace(state, true, -1);
|
||
|
||
if (state.lineIndent === 0 &&
|
||
state.input.charCodeAt(state.position) === 0x2D/* - */ &&
|
||
state.input.charCodeAt(state.position + 1) === 0x2D/* - */ &&
|
||
state.input.charCodeAt(state.position + 2) === 0x2D/* - */) {
|
||
state.position += 3;
|
||
skipSeparationSpace(state, true, -1);
|
||
|
||
} else if (hasDirectives) {
|
||
throwError(state, 'directives end mark is expected');
|
||
}
|
||
|
||
composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true);
|
||
skipSeparationSpace(state, true, -1);
|
||
|
||
if (state.checkLineBreaks &&
|
||
PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) {
|
||
throwWarning(state, 'non-ASCII line breaks are interpreted as content');
|
||
}
|
||
|
||
state.documents.push(state.result);
|
||
|
||
if (state.position === state.lineStart && testDocumentSeparator(state)) {
|
||
|
||
if (state.input.charCodeAt(state.position) === 0x2E/* . */) {
|
||
state.position += 3;
|
||
skipSeparationSpace(state, true, -1);
|
||
}
|
||
return;
|
||
}
|
||
|
||
if (state.position < (state.length - 1)) {
|
||
throwError(state, 'end of the stream or a document separator is expected');
|
||
} else {
|
||
return;
|
||
}
|
||
}
|
||
|
||
|
||
function loadDocuments(input, options) {
|
||
input = String(input);
|
||
options = options || {};
|
||
|
||
if (input.length !== 0) {
|
||
|
||
// Add a trailing `\n` if one does not already exist
|
||
if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ &&
|
||
input.charCodeAt(input.length - 1) !== 0x0D/* CR */) {
|
||
input += '\n';
|
||
}
|
||
|
||
// Strip BOM
|
||
if (input.charCodeAt(0) === 0xFEFF) {
|
||
input = input.slice(1);
|
||
}
|
||
}
|
||
|
||
var state = new State$1(input, options);
|
||
|
||
var nullpos = input.indexOf('\0');
|
||
|
||
if (nullpos !== -1) {
|
||
state.position = nullpos;
|
||
throwError(state, 'null byte is not allowed in input');
|
||
}
|
||
|
||
// Use 0 as the string terminator. That significantly simplifies the bounds checks.
|
||
state.input += '\0';
|
||
|
||
while (state.input.charCodeAt(state.position) === 0x20/* Space */) {
|
||
state.lineIndent += 1;
|
||
state.position += 1;
|
||
}
|
||
|
||
while (state.position < (state.length - 1)) {
|
||
readDocument(state);
|
||
}
|
||
|
||
return state.documents;
|
||
}
|
||
|
||
|
||
function loadAll$1(input, iterator, options) {
|
||
if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') {
|
||
options = iterator;
|
||
iterator = null;
|
||
}
|
||
|
||
var documents = loadDocuments(input, options);
|
||
|
||
if (typeof iterator !== 'function') {
|
||
return documents;
|
||
}
|
||
|
||
for (var index = 0, length = documents.length; index < length; index += 1) {
|
||
iterator(documents[index]);
|
||
}
|
||
}
|
||
|
||
|
||
function load$1(input, options) {
|
||
var documents = loadDocuments(input, options);
|
||
|
||
if (documents.length === 0) {
|
||
/*eslint-disable no-undefined*/
|
||
return undefined;
|
||
} else if (documents.length === 1) {
|
||
return documents[0];
|
||
}
|
||
throw new exception('expected a single document in the stream, but found more');
|
||
}
|
||
|
||
|
||
var loadAll_1 = loadAll$1;
|
||
var load_1 = load$1;
|
||
|
||
var loader = {
|
||
loadAll: loadAll_1,
|
||
load: load_1
|
||
};
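
// Minimal usage sketch for the loader API above:
/*
  loader.load('a: 1\nb: [2, 3]\n');
  // => { a: 1, b: [2, 3] }

  loader.loadAll('---\nfirst: 1\n---\nsecond: 2\n');
  // => [{ first: 1 }, { second: 2 }]
*/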
/*eslint-disable no-use-before-define*/
|
||
|
||
|
||
|
||
|
||
|
||
var _toString = Object.prototype.toString;
|
||
var _hasOwnProperty = Object.prototype.hasOwnProperty;
|
||
|
||
var CHAR_BOM = 0xFEFF;
|
||
var CHAR_TAB = 0x09; /* Tab */
|
||
var CHAR_LINE_FEED = 0x0A; /* LF */
|
||
var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */
|
||
var CHAR_SPACE = 0x20; /* Space */
|
||
var CHAR_EXCLAMATION = 0x21; /* ! */
|
||
var CHAR_DOUBLE_QUOTE = 0x22; /* " */
|
||
var CHAR_SHARP = 0x23; /* # */
|
||
var CHAR_PERCENT = 0x25; /* % */
|
||
var CHAR_AMPERSAND = 0x26; /* & */
|
||
var CHAR_SINGLE_QUOTE = 0x27; /* ' */
|
||
var CHAR_ASTERISK = 0x2A; /* * */
|
||
var CHAR_COMMA = 0x2C; /* , */
|
||
var CHAR_MINUS = 0x2D; /* - */
|
||
var CHAR_COLON = 0x3A; /* : */
|
||
var CHAR_EQUALS = 0x3D; /* = */
|
||
var CHAR_GREATER_THAN = 0x3E; /* > */
|
||
var CHAR_QUESTION = 0x3F; /* ? */
|
||
var CHAR_COMMERCIAL_AT = 0x40; /* @ */
|
||
var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */
|
||
var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */
|
||
var CHAR_GRAVE_ACCENT = 0x60; /* ` */
|
||
var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */
|
||
var CHAR_VERTICAL_LINE = 0x7C; /* | */
|
||
var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */
|
||
|
||
var ESCAPE_SEQUENCES = {};
|
||
|
||
ESCAPE_SEQUENCES[0x00] = '\\0';
|
||
ESCAPE_SEQUENCES[0x07] = '\\a';
|
||
ESCAPE_SEQUENCES[0x08] = '\\b';
|
||
ESCAPE_SEQUENCES[0x09] = '\\t';
|
||
ESCAPE_SEQUENCES[0x0A] = '\\n';
|
||
ESCAPE_SEQUENCES[0x0B] = '\\v';
|
||
ESCAPE_SEQUENCES[0x0C] = '\\f';
|
||
ESCAPE_SEQUENCES[0x0D] = '\\r';
|
||
ESCAPE_SEQUENCES[0x1B] = '\\e';
|
||
ESCAPE_SEQUENCES[0x22] = '\\"';
|
||
ESCAPE_SEQUENCES[0x5C] = '\\\\';
|
||
ESCAPE_SEQUENCES[0x85] = '\\N';
|
||
ESCAPE_SEQUENCES[0xA0] = '\\_';
|
||
ESCAPE_SEQUENCES[0x2028] = '\\L';
|
||
ESCAPE_SEQUENCES[0x2029] = '\\P';
|
||
|
||
var DEPRECATED_BOOLEANS_SYNTAX = [
|
||
'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON',
|
||
'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF'
|
||
];
|
||
|
||
var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/;
|
||
|
||
function compileStyleMap(schema, map) {
|
||
var result, keys, index, length, tag, style, type;
|
||
|
||
if (map === null) return {};
|
||
|
||
result = {};
|
||
keys = Object.keys(map);
|
||
|
||
for (index = 0, length = keys.length; index < length; index += 1) {
|
||
tag = keys[index];
|
||
style = String(map[tag]);
|
||
|
||
if (tag.slice(0, 2) === '!!') {
|
||
tag = 'tag:yaml.org,2002:' + tag.slice(2);
|
||
}
|
||
type = schema.compiledTypeMap['fallback'][tag];
|
||
|
||
if (type && _hasOwnProperty.call(type.styleAliases, style)) {
|
||
style = type.styleAliases[style];
|
||
}
|
||
|
||
result[tag] = style;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
function encodeHex(character) {
|
||
var string, handle, length;
|
||
|
||
string = character.toString(16).toUpperCase();
|
||
|
||
if (character <= 0xFF) {
|
||
handle = 'x';
|
||
length = 2;
|
||
} else if (character <= 0xFFFF) {
|
||
handle = 'u';
|
||
length = 4;
|
||
} else if (character <= 0xFFFFFFFF) {
|
||
handle = 'U';
|
||
length = 8;
|
||
} else {
|
||
throw new exception('code point within a string may not be greater than 0xFFFFFFFF');
|
||
}
|
||
|
||
return '\\' + handle + common.repeat('0', length - string.length) + string;
|
||
}
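
// Worked examples for encodeHex:
/*
  encodeHex(0x07)     // => '\\x07'        (<= 0xFF, two hex digits)
  encodeHex(0x2028)   // => '\\u2028'      (<= 0xFFFF, four hex digits)
  encodeHex(0x1F600)  // => '\\U0001F600'  (> 0xFFFF, eight hex digits)
*/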


var QUOTING_TYPE_SINGLE = 1,
|
||
QUOTING_TYPE_DOUBLE = 2;
|
||
|
||
function State(options) {
|
||
this.schema = options['schema'] || _default;
|
||
this.indent = Math.max(1, (options['indent'] || 2));
|
||
this.noArrayIndent = options['noArrayIndent'] || false;
|
||
this.skipInvalid = options['skipInvalid'] || false;
|
||
this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']);
|
||
this.styleMap = compileStyleMap(this.schema, options['styles'] || null);
|
||
this.sortKeys = options['sortKeys'] || false;
|
||
this.lineWidth = options['lineWidth'] || 80;
|
||
this.noRefs = options['noRefs'] || false;
|
||
this.noCompatMode = options['noCompatMode'] || false;
|
||
this.condenseFlow = options['condenseFlow'] || false;
|
||
this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE;
|
||
this.forceQuotes = options['forceQuotes'] || false;
|
||
this.replacer = typeof options['replacer'] === 'function' ? options['replacer'] : null;
|
||
|
||
this.implicitTypes = this.schema.compiledImplicit;
|
||
this.explicitTypes = this.schema.compiledExplicit;
|
||
|
||
this.tag = null;
|
||
this.result = '';
|
||
|
||
this.duplicates = [];
|
||
this.usedDuplicates = null;
|
||
}
|
||
|
||
// Indents every line in a string. Empty lines (\n only) are not indented.
|
||
function indentString(string, spaces) {
|
||
var ind = common.repeat(' ', spaces),
|
||
position = 0,
|
||
next = -1,
|
||
result = '',
|
||
line,
|
||
length = string.length;
|
||
|
||
while (position < length) {
|
||
next = string.indexOf('\n', position);
|
||
if (next === -1) {
|
||
line = string.slice(position);
|
||
position = length;
|
||
} else {
|
||
line = string.slice(position, next + 1);
|
||
position = next + 1;
|
||
}
|
||
|
||
if (line.length && line !== '\n') result += ind;
|
||
|
||
result += line;
|
||
}
|
||
|
||
return result;
|
||
}
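
// Worked example for indentString:
/*
  indentString('first\n\nsecond\n', 2);
  // => '  first\n\n  second\n'  (the empty line stays unindented)
*/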
function generateNextLine(state, level) {
|
||
return '\n' + common.repeat(' ', state.indent * level);
|
||
}
|
||
|
||
function testImplicitResolving(state, str) {
|
||
var index, length, type;
|
||
|
||
for (index = 0, length = state.implicitTypes.length; index < length; index += 1) {
|
||
type = state.implicitTypes[index];
|
||
|
||
if (type.resolve(str)) {
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
// [33] s-white ::= s-space | s-tab
|
||
function isWhitespace(c) {
|
||
return c === CHAR_SPACE || c === CHAR_TAB;
|
||
}
|
||
|
||
// Returns true if the character can be printed without escaping.
|
||
// From YAML 1.2: "any allowed characters known to be non-printable
|
||
// should also be escaped. [However,] This isn’t mandatory"
|
||
// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029.
|
||
function isPrintable(c) {
|
||
return (0x00020 <= c && c <= 0x00007E)
|
||
|| ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029)
|
||
|| ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM)
|
||
|| (0x10000 <= c && c <= 0x10FFFF);
|
||
}
|
||
|
||
// [34] ns-char ::= nb-char - s-white
|
||
// [27] nb-char ::= c-printable - b-char - c-byte-order-mark
|
||
// [26] b-char ::= b-line-feed | b-carriage-return
|
||
// Including s-white (for some reason, the examples don't match the spec in this respect)
|
||
// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark
|
||
function isNsCharOrWhitespace(c) {
|
||
return isPrintable(c)
|
||
&& c !== CHAR_BOM
|
||
// - b-char
|
||
&& c !== CHAR_CARRIAGE_RETURN
|
||
&& c !== CHAR_LINE_FEED;
|
||
}
|
||
|
||
// [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out
|
||
// c = flow-in ⇒ ns-plain-safe-in
|
||
// c = block-key ⇒ ns-plain-safe-out
|
||
// c = flow-key ⇒ ns-plain-safe-in
|
||
// [128] ns-plain-safe-out ::= ns-char
|
||
// [129] ns-plain-safe-in ::= ns-char - c-flow-indicator
|
||
// [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” )
|
||
// | ( /* An ns-char preceding */ “#” )
|
||
// | ( “:” /* Followed by an ns-plain-safe(c) */ )
|
||
function isPlainSafe(c, prev, inblock) {
|
||
var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c);
|
||
var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c);
|
||
return (
|
||
// ns-plain-safe
|
||
inblock ? // c = flow-in
|
||
cIsNsCharOrWhitespace
|
||
: cIsNsCharOrWhitespace
|
||
// - c-flow-indicator
|
||
&& c !== CHAR_COMMA
|
||
&& c !== CHAR_LEFT_SQUARE_BRACKET
|
||
&& c !== CHAR_RIGHT_SQUARE_BRACKET
|
||
&& c !== CHAR_LEFT_CURLY_BRACKET
|
||
&& c !== CHAR_RIGHT_CURLY_BRACKET
|
||
)
|
||
// ns-plain-char
|
||
&& c !== CHAR_SHARP // false on '#'
|
||
&& !(prev === CHAR_COLON && !cIsNsChar) // false on ': '
|
||
|| (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#'
|
||
|| (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]'
|
||
}
|
||
|
||
// Simplified test for values allowed as the first character in plain style.
|
||
function isPlainSafeFirst(c) {
|
||
// Uses a subset of ns-char - c-indicator
|
||
// where ns-char = nb-char - s-white.
|
||
// No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part
|
||
return isPrintable(c) && c !== CHAR_BOM
|
||
&& !isWhitespace(c) // - s-white
|
||
// - (c-indicator ::=
|
||
// “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}”
|
||
&& c !== CHAR_MINUS
|
||
&& c !== CHAR_QUESTION
|
||
&& c !== CHAR_COLON
|
||
&& c !== CHAR_COMMA
|
||
&& c !== CHAR_LEFT_SQUARE_BRACKET
|
||
&& c !== CHAR_RIGHT_SQUARE_BRACKET
|
||
&& c !== CHAR_LEFT_CURLY_BRACKET
|
||
&& c !== CHAR_RIGHT_CURLY_BRACKET
|
||
// | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"”
|
||
&& c !== CHAR_SHARP
|
||
&& c !== CHAR_AMPERSAND
|
||
&& c !== CHAR_ASTERISK
|
||
&& c !== CHAR_EXCLAMATION
|
||
&& c !== CHAR_VERTICAL_LINE
|
||
&& c !== CHAR_EQUALS
|
||
&& c !== CHAR_GREATER_THAN
|
||
&& c !== CHAR_SINGLE_QUOTE
|
||
&& c !== CHAR_DOUBLE_QUOTE
|
||
// | “%” | “@” | “`”)
|
||
&& c !== CHAR_PERCENT
|
||
&& c !== CHAR_COMMERCIAL_AT
|
||
&& c !== CHAR_GRAVE_ACCENT;
|
||
}
|
||
|
||
// Simplified test for values allowed as the last character in plain style.
|
||
function isPlainSafeLast(c) {
|
||
// Just not whitespace or a colon; it is checked to be a plain character later.
|
||
return !isWhitespace(c) && c !== CHAR_COLON;
|
||
}
|
||
|
||
// Same as 'string'.codePointAt(pos), but works in older browsers.
|
||
function codePointAt(string, pos) {
|
||
var first = string.charCodeAt(pos), second;
|
||
if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) {
|
||
second = string.charCodeAt(pos + 1);
|
||
if (second >= 0xDC00 && second <= 0xDFFF) {
|
||
// https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
|
||
return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000;
|
||
}
|
||
}
|
||
return first;
|
||
}
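
// Worked example for codePointAt: U+1F600 is the surrogate pair 0xD83D 0xDE00, so
/*
  codePointAt('\uD83D\uDE00', 0);
  // => (0xD83D - 0xD800) * 0x400 + 0xDE00 - 0xDC00 + 0x10000 === 0x1F600
*/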
// Determines whether block indentation indicator is required.
|
||
function needIndentIndicator(string) {
|
||
var leadingSpaceRe = /^\n* /;
|
||
return leadingSpaceRe.test(string);
|
||
}
|
||
|
||
var STYLE_PLAIN = 1,
|
||
STYLE_SINGLE = 2,
|
||
STYLE_LITERAL = 3,
|
||
STYLE_FOLDED = 4,
|
||
STYLE_DOUBLE = 5;
|
||
|
||
// Determines which scalar styles are possible and returns the preferred style.
|
||
// lineWidth = -1 => no limit.
|
||
// Pre-conditions: str.length > 0.
|
||
// Post-conditions:
|
||
// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string.
|
||
// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1).
|
||
// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1).
|
||
function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth,
|
||
testAmbiguousType, quotingType, forceQuotes, inblock) {
|
||
|
||
var i;
|
||
var char = 0;
|
||
var prevChar = null;
|
||
var hasLineBreak = false;
|
||
var hasFoldableLine = false; // only checked if shouldTrackWidth
|
||
var shouldTrackWidth = lineWidth !== -1;
|
||
var previousLineBreak = -1; // count the first line correctly
|
||
var plain = isPlainSafeFirst(codePointAt(string, 0))
|
||
&& isPlainSafeLast(codePointAt(string, string.length - 1));
|
||
|
||
if (singleLineOnly || forceQuotes) {
|
||
// Case: no block styles.
|
||
// Check for disallowed characters to rule out plain and single.
|
||
for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {
|
||
char = codePointAt(string, i);
|
||
if (!isPrintable(char)) {
|
||
return STYLE_DOUBLE;
|
||
}
|
||
plain = plain && isPlainSafe(char, prevChar, inblock);
|
||
prevChar = char;
|
||
}
|
||
} else {
|
||
// Case: block styles permitted.
|
||
for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {
|
||
char = codePointAt(string, i);
|
||
if (char === CHAR_LINE_FEED) {
|
||
hasLineBreak = true;
|
||
// Check if any line can be folded.
|
||
if (shouldTrackWidth) {
|
||
hasFoldableLine = hasFoldableLine ||
|
||
// Foldable line = too long, and not more-indented.
|
||
(i - previousLineBreak - 1 > lineWidth &&
|
||
string[previousLineBreak + 1] !== ' ');
|
||
previousLineBreak = i;
|
||
}
|
||
} else if (!isPrintable(char)) {
|
||
return STYLE_DOUBLE;
|
||
}
|
||
plain = plain && isPlainSafe(char, prevChar, inblock);
|
||
prevChar = char;
|
||
}
|
||
// in case the end is missing a \n
|
||
hasFoldableLine = hasFoldableLine || (shouldTrackWidth &&
|
||
(i - previousLineBreak - 1 > lineWidth &&
|
||
string[previousLineBreak + 1] !== ' '));
|
||
}
|
||
// Although every style can represent \n without escaping, prefer block styles
|
||
// for multiline, since they're more readable and they don't add empty lines.
|
||
// Also prefer folding a super-long line.
|
||
if (!hasLineBreak && !hasFoldableLine) {
|
||
// Strings interpretable as another type have to be quoted;
|
||
// e.g. the string 'true' vs. the boolean true.
|
||
if (plain && !forceQuotes && !testAmbiguousType(string)) {
|
||
return STYLE_PLAIN;
|
||
}
|
||
return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE;
|
||
}
|
||
// Edge case: block indentation indicator can only have one digit.
|
||
if (indentPerLevel > 9 && needIndentIndicator(string)) {
|
||
return STYLE_DOUBLE;
|
||
}
|
||
// At this point we know block styles are valid.
|
||
// Prefer literal style unless we want to fold.
|
||
if (!forceQuotes) {
|
||
return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL;
|
||
}
|
||
return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE;
|
||
}
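
// Rough sketch of the outcomes above, assuming the default dumper settings
// (single-quote quoting, indent 2) and an ambiguity test that matches 'true':
/*
  'plain text'  -> STYLE_PLAIN    (safe characters, no line breaks)
  'true'        -> STYLE_SINGLE   (reads as a boolean, so it must be quoted)
  'one\ntwo'    -> STYLE_LITERAL  (multiline, no line exceeds the width)
  'bell \x07'   -> STYLE_DOUBLE   (non-printable characters force escaping)
*/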
// Note: line breaking/folding is implemented for only the folded style.
|
||
// NB. We drop the last trailing newline (if any) of a returned block scalar
|
||
// since the dumper adds its own newline. This always works:
|
||
// • No ending newline => unaffected; already using strip "-" chomping.
|
||
// • Ending newline => removed then restored.
|
||
// Importantly, this keeps the "+" chomp indicator from gaining an extra line.
|
||
function writeScalar(state, string, level, iskey, inblock) {
|
||
state.dump = (function () {
|
||
if (string.length === 0) {
|
||
return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''";
|
||
}
|
||
if (!state.noCompatMode) {
|
||
if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) {
|
||
return state.quotingType === QUOTING_TYPE_DOUBLE ? ('"' + string + '"') : ("'" + string + "'");
|
||
}
|
||
}
|
||
|
||
var indent = state.indent * Math.max(1, level); // no 0-indent scalars
|
||
// As indentation gets deeper, let the width decrease monotonically
|
||
// to the lower bound min(state.lineWidth, 40).
|
||
// Note that this implies
|
||
// state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound.
|
||
// state.lineWidth > 40 + state.indent: width decreases until the lower bound.
|
||
// This behaves better than a constant minimum width which disallows narrower options,
|
||
// or an indent threshold which causes the width to suddenly increase.
|
||
var lineWidth = state.lineWidth === -1
|
||
? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent);
|
||
|
||
// Without knowing if keys are implicit/explicit, assume implicit for safety.
|
||
var singleLineOnly = iskey
|
||
// No block styles in flow mode.
|
||
|| (state.flowLevel > -1 && level >= state.flowLevel);
|
||
function testAmbiguity(string) {
|
||
return testImplicitResolving(state, string);
|
||
}
|
||
|
||
switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth,
|
||
testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) {
|
||
|
||
case STYLE_PLAIN:
|
||
return string;
|
||
case STYLE_SINGLE:
|
||
return "'" + string.replace(/'/g, "''") + "'";
|
||
case STYLE_LITERAL:
|
||
return '|' + blockHeader(string, state.indent)
|
||
+ dropEndingNewline(indentString(string, indent));
|
||
case STYLE_FOLDED:
|
||
return '>' + blockHeader(string, state.indent)
|
||
+ dropEndingNewline(indentString(foldString(string, lineWidth), indent));
|
||
case STYLE_DOUBLE:
|
||
return '"' + escapeString(string) + '"';
|
||
default:
|
||
throw new exception('impossible error: invalid scalar style');
|
||
}
|
||
}());
|
||
}
|
||
|
||
// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9.
|
||
function blockHeader(string, indentPerLevel) {
|
||
var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : '';
|
||
|
||
// note the special case: the string '\n' counts as a "trailing" empty line.
|
||
var clip = string[string.length - 1] === '\n';
|
||
var keep = clip && (string[string.length - 2] === '\n' || string === '\n');
|
||
var chomp = keep ? '+' : (clip ? '' : '-');
|
||
|
||
return indentIndicator + chomp + '\n';
|
||
}
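
// Worked examples for blockHeader with the default indent of 2:
/*
  blockHeader('text', 2)      // => '-\n'   (no trailing \n: strip chomping)
  blockHeader('text\n', 2)    // => '\n'    (one trailing \n: clip, no indicator)
  blockHeader('text\n\n', 2)  // => '+\n'   (extra trailing \n: keep chomping)
  blockHeader('  padded', 2)  // => '2-\n'  (leading space needs the indent indicator)
*/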
// (See the note for writeScalar.)
|
||
function dropEndingNewline(string) {
|
||
return string[string.length - 1] === '\n' ? string.slice(0, -1) : string;
|
||
}
|
||
|
||
// Note: a long line without a suitable break point will exceed the width limit.
|
||
// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0.
|
||
function foldString(string, width) {
|
||
// In folded style, $k$ consecutive newlines output as $k+1$ newlines—
|
||
// unless they're before or after a more-indented line, or at the very
|
||
// beginning or end, in which case $k$ maps to $k$.
|
||
// Therefore, parse each chunk as newline(s) followed by a content line.
|
||
var lineRe = /(\n+)([^\n]*)/g;
|
||
|
||
// first line (possibly an empty line)
|
||
var result = (function () {
|
||
var nextLF = string.indexOf('\n');
|
||
nextLF = nextLF !== -1 ? nextLF : string.length;
|
||
lineRe.lastIndex = nextLF;
|
||
return foldLine(string.slice(0, nextLF), width);
|
||
}());
|
||
// If we haven't reached the first content line yet, don't add an extra \n.
|
||
var prevMoreIndented = string[0] === '\n' || string[0] === ' ';
|
||
var moreIndented;
|
||
|
||
// rest of the lines
|
||
var match;
|
||
while ((match = lineRe.exec(string))) {
|
||
var prefix = match[1], line = match[2];
|
||
moreIndented = (line[0] === ' ');
|
||
result += prefix
|
||
+ (!prevMoreIndented && !moreIndented && line !== ''
|
||
? '\n' : '')
|
||
+ foldLine(line, width);
|
||
prevMoreIndented = moreIndented;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
// Greedy line breaking.
|
||
// Picks the longest line under the limit each time,
|
||
// otherwise settles for the shortest line over the limit.
|
||
// NB. More-indented lines *cannot* be folded, as that would add an extra \n.
|
||
function foldLine(line, width) {
|
||
if (line === '' || line[0] === ' ') return line;
|
||
|
||
// Since a more-indented line adds a \n, breaks can't be followed by a space.
|
||
var breakRe = / [^ ]/g; // note: the match index will always be <= length-2.
|
||
var match;
|
||
// start is an inclusive index. end, curr, and next are exclusive.
|
||
var start = 0, end, curr = 0, next = 0;
|
||
var result = '';
|
||
|
||
// Invariants: 0 <= start <= length-1.
|
||
// 0 <= curr <= next <= max(0, length-2). curr - start <= width.
|
||
// Inside the loop:
|
||
// A match implies length >= 2, so curr and next are <= length-2.
|
||
while ((match = breakRe.exec(line))) {
|
||
next = match.index;
|
||
// maintain invariant: curr - start <= width
|
||
if (next - start > width) {
|
||
end = (curr > start) ? curr : next; // derive end <= length-2
|
||
result += '\n' + line.slice(start, end);
|
||
// skip the space that was output as \n
|
||
start = end + 1; // derive start <= length-1
|
||
}
|
||
curr = next;
|
||
}
|
||
|
||
// By the invariants, start <= length-1, so there is something left over.
|
||
// It is either the whole string or a part starting from non-whitespace.
|
||
result += '\n';
|
||
// Insert a break if the remainder is too long and there is a break available.
|
||
if (line.length - start > width && curr > start) {
|
||
result += line.slice(start, curr) + '\n' + line.slice(curr + 1);
|
||
} else {
|
||
result += line.slice(start);
|
||
}
|
||
|
||
return result.slice(1); // drop extra \n joiner
|
||
}
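
// Worked example for foldLine / foldString with width 7:
/*
  foldLine('abc def ghi', 7)
  // => 'abc def\nghi'  (greedy: the longest chunk that still fits)

  foldString('abc def ghi\n\nmore', 7)
  // => 'abc def\nghi\n\n\nmore'  (k interior newlines come out as k + 1)
*/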
// Escapes a double-quoted string.
|
||
function escapeString(string) {
|
||
var result = '';
|
||
var char = 0;
|
||
var escapeSeq;
|
||
|
||
for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {
|
||
char = codePointAt(string, i);
|
||
escapeSeq = ESCAPE_SEQUENCES[char];
|
||
|
||
if (!escapeSeq && isPrintable(char)) {
|
||
result += string[i];
|
||
if (char >= 0x10000) result += string[i + 1];
|
||
} else {
|
||
result += escapeSeq || encodeHex(char);
|
||
}
|
||
}
|
||
|
||
return result;
|
||
}
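
// Worked example for escapeString:
/*
  escapeString('say "hi"\n\x07')
  // => 'say \\"hi\\"\\n\\a'  (known escapes from ESCAPE_SEQUENCES, others via encodeHex)
*/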
function writeFlowSequence(state, level, object) {
|
||
var _result = '',
|
||
_tag = state.tag,
|
||
index,
|
||
length,
|
||
value;
|
||
|
||
for (index = 0, length = object.length; index < length; index += 1) {
|
||
value = object[index];
|
||
|
||
if (state.replacer) {
|
||
value = state.replacer.call(object, String(index), value);
|
||
}
|
||
|
||
// Write only valid elements; put null in place of invalid elements.
|
||
if (writeNode(state, level, value, false, false) ||
|
||
(typeof value === 'undefined' &&
|
||
writeNode(state, level, null, false, false))) {
|
||
|
||
if (_result !== '') _result += ',' + (!state.condenseFlow ? ' ' : '');
|
||
_result += state.dump;
|
||
}
|
||
}
|
||
|
||
state.tag = _tag;
|
||
state.dump = '[' + _result + ']';
|
||
}
|
||
|
||
function writeBlockSequence(state, level, object, compact) {
|
||
var _result = '',
|
||
_tag = state.tag,
|
||
index,
|
||
length,
|
||
value;
|
||
|
||
for (index = 0, length = object.length; index < length; index += 1) {
|
||
value = object[index];
|
||
|
||
if (state.replacer) {
|
||
value = state.replacer.call(object, String(index), value);
|
||
}
|
||
|
||
// Write only valid elements; put null in place of invalid elements.
|
||
if (writeNode(state, level + 1, value, true, true, false, true) ||
|
||
(typeof value === 'undefined' &&
|
||
writeNode(state, level + 1, null, true, true, false, true))) {
|
||
|
||
if (!compact || _result !== '') {
|
||
_result += generateNextLine(state, level);
|
||
}
|
||
|
||
if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
|
||
_result += '-';
|
||
} else {
|
||
_result += '- ';
|
||
}
|
||
|
||
_result += state.dump;
|
||
}
|
||
}
|
||
|
||
state.tag = _tag;
|
||
state.dump = _result || '[]'; // Empty sequence if no valid values.
|
||
}
|
||
|
||
function writeFlowMapping(state, level, object) {
|
||
var _result = '',
|
||
_tag = state.tag,
|
||
objectKeyList = Object.keys(object),
|
||
index,
|
||
length,
|
||
objectKey,
|
||
objectValue,
|
||
pairBuffer;
|
||
|
||
for (index = 0, length = objectKeyList.length; index < length; index += 1) {
|
||
|
||
pairBuffer = '';
|
||
if (_result !== '') pairBuffer += ', ';
|
||
|
||
if (state.condenseFlow) pairBuffer += '"';
|
||
|
||
objectKey = objectKeyList[index];
|
||
objectValue = object[objectKey];
|
||
|
||
if (state.replacer) {
|
||
objectValue = state.replacer.call(object, objectKey, objectValue);
|
||
}
|
||
|
||
if (!writeNode(state, level, objectKey, false, false)) {
|
||
continue; // Skip this pair because of invalid key.
|
||
}
|
||
|
||
if (state.dump.length > 1024) pairBuffer += '? ';
|
||
|
||
pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' ');
|
||
|
||
if (!writeNode(state, level, objectValue, false, false)) {
|
||
continue; // Skip this pair because of invalid value.
|
||
}
|
||
|
||
pairBuffer += state.dump;
|
||
|
||
// Both key and value are valid.
|
||
_result += pairBuffer;
|
||
}
|
||
|
||
state.tag = _tag;
|
||
state.dump = '{' + _result + '}';
|
||
}
|
||
|
||
function writeBlockMapping(state, level, object, compact) {
|
||
var _result = '',
|
||
_tag = state.tag,
|
||
objectKeyList = Object.keys(object),
|
||
index,
|
||
length,
|
||
objectKey,
|
||
objectValue,
|
||
explicitPair,
|
||
pairBuffer;
|
||
|
||
// Allow sorting keys so that the output file is deterministic
|
||
if (state.sortKeys === true) {
|
||
// Default sorting
|
||
objectKeyList.sort();
|
||
} else if (typeof state.sortKeys === 'function') {
|
||
// Custom sort function
|
||
objectKeyList.sort(state.sortKeys);
|
||
} else if (state.sortKeys) {
|
||
// Something is wrong
|
||
throw new exception('sortKeys must be a boolean or a function');
|
||
}
|
||
|
||
for (index = 0, length = objectKeyList.length; index < length; index += 1) {
|
||
pairBuffer = '';
|
||
|
||
if (!compact || _result !== '') {
|
||
pairBuffer += generateNextLine(state, level);
|
||
}
|
||
|
||
objectKey = objectKeyList[index];
|
||
objectValue = object[objectKey];
|
||
|
||
if (state.replacer) {
|
||
objectValue = state.replacer.call(object, objectKey, objectValue);
|
||
}
|
||
|
||
if (!writeNode(state, level + 1, objectKey, true, true, true)) {
|
||
continue; // Skip this pair because of invalid key.
|
||
}
|
||
|
||
explicitPair = (state.tag !== null && state.tag !== '?') ||
|
||
(state.dump && state.dump.length > 1024);
|
||
|
||
if (explicitPair) {
|
||
if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
|
||
pairBuffer += '?';
|
||
} else {
|
||
pairBuffer += '? ';
|
||
}
|
||
}
|
||
|
||
pairBuffer += state.dump;
|
||
|
||
if (explicitPair) {
|
||
pairBuffer += generateNextLine(state, level);
|
||
}
|
||
|
||
if (!writeNode(state, level + 1, objectValue, true, explicitPair)) {
|
||
continue; // Skip this pair because of invalid value.
|
||
}
|
||
|
||
if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
|
||
pairBuffer += ':';
|
||
} else {
|
||
pairBuffer += ': ';
|
||
}
|
||
|
||
pairBuffer += state.dump;
|
||
|
||
// Both key and value are valid.
|
||
_result += pairBuffer;
|
||
}
|
||
|
||
state.tag = _tag;
|
||
state.dump = _result || '{}'; // Empty mapping if no valid pairs.
|
||
}
|
||
|
||
function detectType(state, object, explicit) {
|
||
var _result, typeList, index, length, type, style;
|
||
|
||
typeList = explicit ? state.explicitTypes : state.implicitTypes;
|
||
|
||
for (index = 0, length = typeList.length; index < length; index += 1) {
|
||
type = typeList[index];
|
||
|
||
if ((type.instanceOf || type.predicate) &&
|
||
(!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) &&
|
||
(!type.predicate || type.predicate(object))) {
|
||
|
||
if (explicit) {
|
||
if (type.multi && type.representName) {
|
||
state.tag = type.representName(object);
|
||
} else {
|
||
state.tag = type.tag;
|
||
}
|
||
} else {
|
||
state.tag = '?';
|
||
}
|
||
|
||
if (type.represent) {
|
||
style = state.styleMap[type.tag] || type.defaultStyle;
|
||
|
||
if (_toString.call(type.represent) === '[object Function]') {
|
||
_result = type.represent(object, style);
|
||
} else if (_hasOwnProperty.call(type.represent, style)) {
|
||
_result = type.represent[style](object, style);
|
||
} else {
|
||
throw new exception('!<' + type.tag + '> tag resolver accepts not "' + style + '" style');
|
||
}
|
||
|
||
state.dump = _result;
|
||
}
|
||
|
||
return true;
|
||
}
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
// Serializes `object` and writes the result to `state.dump`.
|
||
// Returns true on success, or false on invalid object.
|
||
//
|
||
function writeNode(state, level, object, block, compact, iskey, isblockseq) {
|
||
state.tag = null;
|
||
state.dump = object;
|
||
|
||
if (!detectType(state, object, false)) {
|
||
detectType(state, object, true);
|
||
}
|
||
|
||
var type = _toString.call(state.dump);
|
||
var inblock = block;
|
||
var tagStr;
|
||
|
||
if (block) {
|
||
block = (state.flowLevel < 0 || state.flowLevel > level);
|
||
}
|
||
|
||
var objectOrArray = type === '[object Object]' || type === '[object Array]',
|
||
duplicateIndex,
|
||
duplicate;
|
||
|
||
if (objectOrArray) {
|
||
duplicateIndex = state.duplicates.indexOf(object);
|
||
duplicate = duplicateIndex !== -1;
|
||
}
|
||
|
||
if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) {
|
||
compact = false;
|
||
}
|
||
|
||
if (duplicate && state.usedDuplicates[duplicateIndex]) {
|
||
state.dump = '*ref_' + duplicateIndex;
|
||
} else {
|
||
if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) {
|
||
state.usedDuplicates[duplicateIndex] = true;
|
||
}
|
||
if (type === '[object Object]') {
|
||
if (block && (Object.keys(state.dump).length !== 0)) {
|
||
writeBlockMapping(state, level, state.dump, compact);
|
||
if (duplicate) {
|
||
state.dump = '&ref_' + duplicateIndex + state.dump;
|
||
}
|
||
} else {
|
||
writeFlowMapping(state, level, state.dump);
|
||
if (duplicate) {
|
||
state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;
|
||
}
|
||
}
|
||
} else if (type === '[object Array]') {
|
||
if (block && (state.dump.length !== 0)) {
|
||
if (state.noArrayIndent && !isblockseq && level > 0) {
|
||
writeBlockSequence(state, level - 1, state.dump, compact);
|
||
} else {
|
||
writeBlockSequence(state, level, state.dump, compact);
|
||
}
|
||
if (duplicate) {
|
||
state.dump = '&ref_' + duplicateIndex + state.dump;
|
||
}
|
||
} else {
|
||
writeFlowSequence(state, level, state.dump);
|
||
if (duplicate) {
|
||
state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;
|
||
}
|
||
}
|
||
} else if (type === '[object String]') {
|
||
if (state.tag !== '?') {
|
||
writeScalar(state, state.dump, level, iskey, inblock);
|
||
}
|
||
} else if (type === '[object Undefined]') {
|
||
return false;
|
||
} else {
|
||
if (state.skipInvalid) return false;
|
||
throw new exception('unacceptable kind of an object to dump ' + type);
|
||
}
|
||
|
||
if (state.tag !== null && state.tag !== '?') {
|
||
// Need to encode all characters except those allowed by the spec:
|
||
//
|
||
// [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */
|
||
// [36] ns-hex-digit ::= ns-dec-digit
|
||
// | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */
|
||
// [37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */
|
||
// [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-”
|
||
// [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#”
|
||
// | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,”
|
||
// | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]”
|
||
//
|
||
// Also need to encode '!' because it has special meaning (end of tag prefix).
|
||
//
|
||
tagStr = encodeURI(
|
||
state.tag[0] === '!' ? state.tag.slice(1) : state.tag
|
||
).replace(/!/g, '%21');
|
||
|
||
if (state.tag[0] === '!') {
|
||
tagStr = '!' + tagStr;
|
||
} else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') {
|
||
tagStr = '!!' + tagStr.slice(18);
|
||
} else {
|
||
tagStr = '!<' + tagStr + '>';
|
||
}
|
||
|
||
state.dump = tagStr + ' ' + state.dump;
|
||
}
|
||
}
|
||
|
||
return true;
|
||
}
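
// Sketch of how the tag prefix computed above is rendered:
/*
  state.tag 'tag:yaml.org,2002:str'   -> dumped as '!!str ...'
  state.tag '!local'                  -> dumped as '!local ...'
  state.tag 'tag:example.com,2014:x'  -> dumped as '!<tag:example.com,2014:x> ...'
*/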
function getDuplicateReferences(object, state) {
|
||
var objects = [],
|
||
duplicatesIndexes = [],
|
||
index,
|
||
length;
|
||
|
||
inspectNode(object, objects, duplicatesIndexes);
|
||
|
||
for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) {
|
||
state.duplicates.push(objects[duplicatesIndexes[index]]);
|
||
}
|
||
state.usedDuplicates = new Array(length);
|
||
}
|
||
|
||
function inspectNode(object, objects, duplicatesIndexes) {
|
||
var objectKeyList,
|
||
index,
|
||
length;
|
||
|
||
if (object !== null && typeof object === 'object') {
|
||
index = objects.indexOf(object);
|
||
if (index !== -1) {
|
||
if (duplicatesIndexes.indexOf(index) === -1) {
|
||
duplicatesIndexes.push(index);
|
||
}
|
||
} else {
|
||
objects.push(object);
|
||
|
||
if (Array.isArray(object)) {
|
||
for (index = 0, length = object.length; index < length; index += 1) {
|
||
inspectNode(object[index], objects, duplicatesIndexes);
|
||
}
|
||
} else {
|
||
objectKeyList = Object.keys(object);
|
||
|
||
for (index = 0, length = objectKeyList.length; index < length; index += 1) {
|
||
inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
function dump$1(input, options) {
|
||
options = options || {};
|
||
|
||
var state = new State(options);
|
||
|
||
if (!state.noRefs) getDuplicateReferences(input, state);
|
||
|
||
var value = input;
|
||
|
||
if (state.replacer) {
|
||
value = state.replacer.call({ '': value }, '', value);
|
||
}
|
||
|
||
if (writeNode(state, 0, value, true, true)) return state.dump + '\n';
|
||
|
||
return '';
|
||
}
|
||
|
||
var dump_1 = dump$1;
|
||
|
||
var dumper = {
|
||
dump: dump_1
|
||
};
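
// Minimal usage sketch for the dumper API above:
/*
  dumper.dump({ b: 2, a: 1 }, { sortKeys: true });
  // => 'a: 1\nb: 2\n'
*/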
function renamed(from, to) {
|
||
return function () {
|
||
throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' +
|
||
'Use yaml.' + to + ' instead, which is now safe by default.');
|
||
};
|
||
}
|
||
|
||
|
||
var Type = type;
|
||
var Schema = schema;
|
||
var FAILSAFE_SCHEMA = failsafe;
|
||
var JSON_SCHEMA = json;
|
||
var CORE_SCHEMA = core;
|
||
var DEFAULT_SCHEMA = _default;
|
||
var load = loader.load;
|
||
var loadAll = loader.loadAll;
|
||
var dump = dumper.dump;
|
||
var YAMLException = exception;
|
||
|
||
// Re-export all types in case the user wants to create a custom schema
|
||
var types = {
|
||
binary: binary,
|
||
float: js_yaml_float,
|
||
map: map,
|
||
null: _null,
|
||
pairs: pairs,
|
||
set: set,
|
||
timestamp: timestamp,
|
||
bool: bool,
|
||
int: js_yaml_int,
|
||
merge: merge,
|
||
omap: omap,
|
||
seq: seq,
|
||
str: str
|
||
};
|
||
|
||
// Removed functions from JS-YAML 3.0.x
|
||
var safeLoad = renamed('safeLoad', 'load');
|
||
var safeLoadAll = renamed('safeLoadAll', 'loadAll');
|
||
var safeDump = renamed('safeDump', 'dump');
|
||
|
||
var jsYaml = {
|
||
Type: Type,
|
||
Schema: Schema,
|
||
FAILSAFE_SCHEMA: FAILSAFE_SCHEMA,
|
||
JSON_SCHEMA: JSON_SCHEMA,
|
||
CORE_SCHEMA: CORE_SCHEMA,
|
||
DEFAULT_SCHEMA: DEFAULT_SCHEMA,
|
||
load: load,
|
||
loadAll: loadAll,
|
||
dump: dump,
|
||
YAMLException: YAMLException,
|
||
types: types,
|
||
safeLoad: safeLoad,
|
||
safeLoadAll: safeLoadAll,
|
||
safeDump: safeDump
|
||
};
|
||
|
||
/* harmony default export */ const js_yaml = (jsYaml);
|
||
|
||
|
||
;// CONCATENATED MODULE: ./src/model/equipment-module/tags/control.js


/* harmony default export */ const control = (async ({
|
||
path,
|
||
opc,
|
||
phase,
|
||
}) => {
|
||
const _controlTags = [
|
||
'Request',
|
||
'Command',
|
||
'Failure',
|
||
'Owner',
|
||
'Pause',
|
||
'Paused',
|
||
'SingleStep',
|
||
'Status',
|
||
'StepIndex',
|
||
'Unit'
|
||
].map(
|
||
name => ({
|
||
...lib_elements(phase, `ControlTag${name}`)[0],
|
||
ID: name,
|
||
}),
|
||
).map(
|
||
(
|
||
{
|
||
name,
|
||
ID,
|
||
elements
|
||
},
|
||
index,
|
||
) => ({
|
||
class: {
|
||
ID,
|
||
Name: name,
|
||
Type: get_attributes(elements).DataType,
|
||
|
||
},
|
||
parameter: get_attributes(elements)
|
||
})
|
||
)
|
||
await write_table('## control', path, _controlTags, opc)
|
||
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/model/equipment-module/tags/request.js


/* harmony default export */ const request = (async ({
|
||
path,
|
||
opc,
|
||
phase,
|
||
}) => {
|
||
const _classRequests = lib_elements(phase, 'RequestTag')
|
||
.map(
|
||
(
|
||
{
|
||
elements
|
||
},
|
||
index,
|
||
) => ({
|
||
class: {
|
||
ID: String(index),
|
||
Name: `REQUEST${String(index).padStart(2, '0')}`,
|
||
Type: 'Integer',
|
||
|
||
},
|
||
parameter: get_attributes(elements)
|
||
})
|
||
)
|
||
await write_table('## requests', path, _classRequests, opc)
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/model/equipment-module/tags/parameter.js


/* harmony default export */ const parameter = (async ({
|
||
path,
|
||
opc,
|
||
phase,
|
||
phaseClass,
|
||
}) => {
|
||
const _phaseParameters = lib_elements(phase, 'ParameterTag')
|
||
const _classParameters = lib_elements(phaseClass, 'RecipeParameter')
|
||
.map(
|
||
({ elements }, index) => ({
|
||
class: {
|
||
...get_attributes(elements),
|
||
},
|
||
parameter: {
|
||
...get_attributes(_phaseParameters[index].elements),
|
||
}
|
||
})
|
||
)
|
||
await write_table('## parameters', path, _classParameters, opc)
|
||
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/model/equipment-module/tags/report.js


/* harmony default export */ const report = (async ({
|
||
path,
|
||
opc,
|
||
phase,
|
||
phaseClass,
|
||
}) => {
|
||
const _phaseReports = lib_elements(phase, 'ReportTag')
|
||
const _classReports = lib_elements(phaseClass, 'ReportParameter')
|
||
.map(
|
||
({ elements }, index) => ({
|
||
class: {
|
||
...get_attributes(elements),
|
||
},
|
||
parameter: {
|
||
...get_attributes(_phaseReports[index].elements),
|
||
}
|
||
})
|
||
)
|
||
await write_table('## reports', path, _classReports, opc, true)
|
||
|
||
});
|
||
|
||
;// CONCATENATED MODULE: ./src/model/equipment-module/phase/index.js
|
||
|
||
|
||
|
||
/* harmony default export */ const equipment_module_phase = ((model, phaseName) =>
|
||
lib_elements(
|
||
lib_elements(
|
||
model,
|
||
'AreaModel',
|
||
)[0],
|
||
'EquipmentModule',
|
||
).find(
|
||
(equipmentModule) =>
|
||
texts(
|
||
equipmentModule,
|
||
'UniqueName',
|
||
)[0] === phaseName,
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/model/equipment-module/phase-class/index.js
|
||
|
||
|
||
|
||
/* harmony default export */ const phase_class = ((model, className ) =>
|
||
lib_elements(
|
||
lib_elements(
|
||
model,
|
||
'AreaModel',
|
||
)[0],
|
||
'RecipePhase',
|
||
)
|
||
.find(
|
||
(recipePhase) =>
|
||
texts(
|
||
recipePhase,
|
||
'UniqueName',
|
||
)[0] === className
|
||
));
|
||
|
||
;// CONCATENATED MODULE: ./src/lib/read-if-exists.js
/* harmony default export */ const read_if_exists = (async (fileName) => {
  if (await exists(fileName)) {
    return (await external_fs_namespaceObject.promises.readFile(fileName)).toString()
  }
});

;// CONCATENATED MODULE: ./src/model/equipment-module/index.js
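// Writes one markdown page per equipment module: header, unit and recipe-phase links, attributes,
// control/request/parameter/report tables, and a link to the phase sequence document when the
// module's tag server is listed in servers.yml.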
/* harmony default export */ const equipment_module = (async ({
  areaModel,
  equipmentModuleName,
  unitName,
  opc,
  root,
}) => {
  const path = `${root}/equipment-modules/${equipmentModuleName}.md`;
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${equipmentModuleName}\n\n**equipment module**\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `unit: [${unitName}](../units/${unitName}.md)\n\n`,
  )

  const phase = equipment_module_phase(areaModel, equipmentModuleName)
  const phaseClass = phase_class(areaModel, texts(phase, 'RecipePhase')[0])
  const recipePhase = texts(phase, 'RecipePhase')[0]

  await external_fs_namespaceObject.promises.appendFile(
    `${root}/recipe-phases/${recipePhase}.md`,
    `* [${unitName}](../units/${unitName}.md)\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    `${root}/recipe-phases/${recipePhase}.md`,
    ` * [${equipmentModuleName}](../equipment-modules/${equipmentModuleName}.md)\n`,
  )

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `recipe phase: [${recipePhase}](../recipe-phases/${recipePhase}.md)\n\n`,
  )

  await write_attributes(phase, path)

  await control({ path, phase, opc })
  await request({ path, phase, opc })
  await parameter({ path, phase, opc, phaseClass })
  await report({ path, phase, opc, phaseClass })

  const server = texts(
    lib_elements(phase, 'ControlTagRequest')[0],
    'Server',
  )[0]

  const servers = js_yaml.load(
    await external_fs_namespaceObject.promises.readFile('servers.yml', 'utf8'),
  )
  if (servers[server]) {
    const sequenceFile = (servers[server].sequenceFile === 'recipePhase')
      ? `${recipePhase.toLowerCase()}`
      : `${equipmentModuleName.replace(/_/, '/').toLowerCase()}`

    const sequenceUrl = `${servers[server].sequenceUrl}/${sequenceFile}.md`
    const sequence = await read_if_exists(sequenceUrl)
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `\n## sequence\n\n`,
    )
    await external_fs_namespaceObject.promises.appendFile(
      path,
      sequence
        ? `* [${sequenceFile}](../../${sequenceUrl})\n`
        : `* undefined\n`,
    )
  }
});

;// CONCATENATED MODULE: ./src/model/equipment-module/equipment-modules.js
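// Writes the equipment-modules README for a unit and generates a page for each ConfiguredEquipmentModuleName.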
/* harmony default export */ const equipment_modules = (async ({
  areaModel,
  unit,
  unitName,
  opc,
  root,
}) => {
  const path = await read_me({
    title: 'equipment modules',
    root: `${root}/equipment-modules`,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `* unit: [${unitName}](../units/${unitName}.md)\n`,
  )

  for (const equipmentModuleName of texts(unit, 'ConfiguredEquipmentModuleName')) {
    await external_fs_namespaceObject.promises.appendFile(
      `${root}/units/${unitName}.md`,
      ` * [${equipmentModuleName}](../equipment-modules/${equipmentModuleName}.md)\n`,
    )
    await external_fs_namespaceObject.promises.appendFile(
      path,
      ` * [${equipmentModuleName}](./${equipmentModuleName}.md)\n`,
    )
    await equipment_module({
      areaModel,
      equipmentModuleName,
      unitName,
      opc,
      root,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/unit/index.js
/* harmony default export */ const model_unit = (async ({
  areaModel,
  processCellName,
  unitName,
  opc,
  root,
}) => {
  const path = `${root}/units/${unitName}.md`;

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${unitName}\n\n**unit**\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `process cell: [${processCellName}](../process-cells/${processCellName}.md)\n\n`,
  )

  const unit = lib_elements(
    lib_elements(areaModel, 'AreaModel')[0],
    'Unit',
  ).find(
    (unit) => texts(unit, 'UniqueName')[0] === unitName,
  )

  await attributes({
    areaModel,
    opc,
    unit,
    unitName,
    path,
  })
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n## equipment modules\n\n`,
  )
  await equipment_modules({
    areaModel,
    unit,
    unitName,
    opc,
    root,
  })
});

;// CONCATENATED MODULE: ./src/model/unit/units.js
/* harmony default export */ const units = (async ({
  areaModel,
  opc,
  root,
  processCell,
  processCellName,
  processCellPath,
}) => {
  const path = await read_me({
    title: 'units',
    root: `${root}/units`,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `* process cell: [${processCellName}](../process-cells/${processCellName}.md)\n`,
  )

  for (const unitName of texts(processCell, 'ConfiguredUnitName')) {
    const unit = lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'Unit',
    ).find(
      (unit) => texts(unit, 'UniqueName')[0] === unitName,
    )
    await external_fs_namespaceObject.promises.appendFile(
      processCellPath,
      `* [${unitName}](../units/${unitName}.md)\n`,
    )
    await external_fs_namespaceObject.promises.appendFile(
      path,
      ` * [${unitName}](./${unitName}.md)\n`,
    )
    await model_unit({
      areaModel,
      processCellName,
      unitName,
      opc,
      root,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/process-cell/index.js
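// Writes one markdown page per process cell (header, area link, attributes) and then documents its units.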
/* harmony default export */ const process_cell = (async ({
  areaName,
  areaModel,
  processCell,
  processCellName,
  opc,
  root,
}) => {
  const path = `${root}/process-cells/${processCellName}.md`;

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n# ${processCellName}\n\n**process cell**\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `area: [${areaName}](../areas/${areaName}.md)\n\n`,
  )

  await write_attributes(processCell, path)
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n## units\n\n`,
  )

  await units({
    areaModel,
    processCellName,
    processCell,
    processCellPath: path,
    opc,
    root,
  })
});

;// CONCATENATED MODULE: ./src/model/process-cell/process-cells.js
/* harmony default export */ const process_cells = (async ({
  areaModel,
  areaName,
  areaPath,
  opc,
  root,
}) => {
  const path = await read_me({
    title: 'process cells',
    root: `${root}/process-cells`,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `* area: [${areaName}](../areas/${areaName}.md)\n`,
  )

  for (
    const processCell of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'ProcessCell',
    )
  ) {
    const processCellName = texts(processCell, 'UniqueName')[0]

    await external_fs_namespaceObject.promises.appendFile(
      areaPath,
      `* [${processCellName}](../process-cells/${processCellName}.md)\n`,
    )
    await external_fs_namespaceObject.promises.appendFile(
      path,
      ` * [${processCellName}](./${processCellName}.md)\n`,
    )
    await process_cell({
      processCellName,
      areaName,
      areaModel,
      processCell,
      root,
      opc,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/area/index.js
/* harmony default export */ const model_area = (async ({
  areaModel,
  areaName,
  area,
  opc,
  root,
}) => {
  const path = `${root}/areas/${areaName}.md`;
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${areaName}\n\n**area**\n`,
  )
  await write_attributes(area, path)

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n## process cells\n\n`,
  )

  await process_cells({
    areaPath: path,
    areaModel,
    areaName,
    opc,
    root,
  })
});

;// CONCATENATED MODULE: ./src/model/area/areas.js
/* harmony default export */ const areas = (async ({
  areaModel,
  opc,
  root,
}) => {
  const path = await read_me({
    title: 'areas',
    root: `${root}/areas`,
  })

  for (
    const area of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'Area',
    )
  ) {
    const areaName = texts(area, 'UniqueName')[0]
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `* [${areaName}](./${areaName}.md)\n`,
    )

    await model_area({
      areaName,
      areaModel,
      area,
      opc,
      root,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/recipe-phase/recipe-parameter/recipe-parameters.js
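// Flattens a RecipeParameter element into name/type/min/max/default/eu, picking the type-specific
// limits and defaults, then writes one markdown table row per parameter.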
const recipeParameter = rp => ({
  name: texts(rp, 'Name')[0],
  type: texts(rp, 'Type')[0],
  min: texts(rp, 'Type')[0] === 'Integer'
    ? texts(rp, 'IntegerMin')[0]
    : texts(rp, 'Type')[0] === 'Real'
      ? texts(rp, 'RealMin')[0]
      : '',
  max: texts(rp, 'Type')[0] === 'Integer'
    ? texts(rp, 'IntegerMax')[0]
    : texts(rp, 'Type')[0] === 'Real'
      ? texts(rp, 'RealMax')[0]
      : '',
  default: texts(rp, 'Type')[0] === 'Integer'
    ? texts(rp, 'IntegerDefault')[0]
    : texts(rp, 'Type')[0] === 'Real'
      ? texts(rp, 'RealDefault')[0]
      : texts(rp, 'Type')[0] === 'String'
        ? texts(rp, 'StringDefault')[0]
        : texts(rp, 'Type')[0] === 'Enumeration'
          ? texts(rp, 'EnumerationDefault')[0]
          : (() => { throw new Error(`no default ${texts(rp, 'Type')[0]}`) })(),
  eu: texts(rp, 'Type')[0] === 'Enumeration'
    ? texts(rp, 'EnumerationSetName')[0]
    : texts(rp, 'EngineeringUnits')[0],
})

/* harmony default export */ const recipe_parameters = (async ({
  areaModel,
  recipePhase,
  path,
}) => {
  for (
    const rp of lib_elements(recipePhase, 'RecipeParameter').map(recipeParameter)
  ) {
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `|${rp.name.padEnd(40)}|${rp.type.padEnd(22)}|${rp.eu.padEnd(22)}|${rp.min.padEnd(22)}|${rp.max.padEnd(22)}|${rp.default.padEnd(22)}|\n`,
    )
  }
});

;// CONCATENATED MODULE: ./src/model/recipe-phase/report-parameter/report-parameters.js
const reportParameter = rp => ({
  name: texts(rp, 'Name')[0],
  type: texts(rp, 'Type')[0],
  eu: texts(rp, 'Type')[0] === 'Enumeration'
    ? texts(rp, 'EnumerationSetName')[0]
    : texts(rp, 'EngineeringUnits')[0],
})

/* harmony default export */ const report_parameters = (async ({
  recipePhase,
  path,
}) => {
  for (
    const rp of lib_elements(recipePhase, 'ReportParameter').map(reportParameter)
  ) {
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `|${rp.name.padEnd(40)}|${rp.type.padEnd(22)}|${rp.eu.padEnd(22)}|\n`,
    )
  }
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n`,
  );
});

;// CONCATENATED MODULE: ./src/model/recipe-phase/index.js
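// Writes one markdown page per recipe phase: attributes plus recipe-parameter and report-parameter
// tables and an "## equipment modules" section that the equipment-module generator appends to.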
/* harmony default export */ const recipe_phase = (async ({
  areaModel,
  recipePhaseName,
  recipePhase,
  opc,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${recipePhaseName}\n\n**recipe phase**\n\n`,
  )
  await write_attributes(recipePhase, path)

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `## recipe parameters

| name | type | eu | min | max | default |
|----------------------------------------|----------------------|----------------------|----------------------|----------------------|----------------------|
`,
  )
  await recipe_parameters({
    areaModel,
    recipePhaseName,
    recipePhase,
    path,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `
## report parameters

| name | type | eu |
|----------------------------------------|----------------------|----------------------|
`,
  )
  await report_parameters({
    areaModel,
    recipePhaseName,
    recipePhase,
    path,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `## equipment modules\n\n`,
  );
});

;// CONCATENATED MODULE: ./src/model/recipe-phase/recipe-phases.js
/* harmony default export */ const recipe_phases = (async ({
  areaModel,
  opc,
  root,
}) => {
  const path = await read_me({
    title: 'recipe phases',
    root: `${root}/recipe-phases`,
  })

  for (
    const rp of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'RecipePhase',
    )
  ) {
    const recipePhaseName = texts(rp, 'UniqueName')[0]
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `* [${recipePhaseName}](./${recipePhaseName}.md)\n`,
    )

    await recipe_phase({
      recipePhaseName,
      areaModel,
      recipePhase: rp,
      opc,
      path: `${root}/recipe-phases/${recipePhaseName}.md`,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/enumeration-set/index.js
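// Writes one markdown page per enumeration set, including a name/ordinal table of its members.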
/* harmony default export */ const enumeration_set = (async ({
  enumerationSet,
  enumerationSetName,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${enumerationSetName}\n\nenumeration set\n`,
  )

  await write_attributes(enumerationSet, path)

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### members\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `
|name |ordinal |
|--------------------|---------------------|
`,
  )
  for (
    const member of lib_elements(enumerationSet, 'Member').map(
      ({ elements }) => get_attributes(elements),
    )
  ) {
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `|${member.Name.padEnd(20)}|${member.Ordinal.padEnd(20)}\n`,
    )
  }
});

;// CONCATENATED MODULE: ./src/model/enumeration-set/enumeration-sets.js
/* harmony default export */ const enumeration_sets = (async ({
  areaModel,
  root,
}) => {
  const path = await read_me({
    title: 'enumeration sets',
    root: `${root}/enumeration-sets`,
  })

  for (
    const enumerationSet of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'EnumerationSet',
    )
  ) {
    const enumerationSetName = texts(enumerationSet, 'UniqueName')[0]
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `* [${enumerationSetName}](./${enumerationSetName}.md)\n`,
    )

    await enumeration_set({
      enumerationSetName,
      areaModel,
      enumerationSet,
      path: `${root}/enumeration-sets/${enumerationSetName}.md`,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/server/index.js
/* harmony default export */ const model_server = (async ({
  server,
  serverName,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${serverName}\n\n**server**\n`,
  )

  await write_attributes(server, path)
});

;// CONCATENATED MODULE: ./src/model/server/servers.js
/* harmony default export */ const servers = (async ({
  areaModel,
  root,
}) => {
  const path = await read_me({
    title: 'servers',
    root: `${root}/servers`,
  })

  for (
    const server of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'Server',
    )
  ) {
    const serverName = texts(server, 'Name')[0]
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `* [${serverName}](./${serverName}.md)\n`,
    )

    await model_server({
      serverName,
      server,
      path: `${root}/servers/${serverName}.md`,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/_class/index.js
/* harmony default export */ const model_class = (async ({
  _class,
  className,
  path,
  name,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${className}\n\n**${name}**\n`,
  )

  await write_attributes(_class, path)
});

;// CONCATENATED MODULE: ./src/model/_class/classes.js
/* harmony default export */ const classes = (async ({
  areaModel,
  root,
  title,
  elementName,
  name,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `* [${title}](./${title})\n`,
  )

  const readMePath = await read_me({
    title,
    root: `${root}/${title}`,
  })

  for (
    const _class of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      elementName,
    )
  ) {
    const className = texts(_class, 'UniqueName')[0]
    await external_fs_namespaceObject.promises.appendFile(
      readMePath,
      `* [${className}](./${className}.md)\n`,
    )

    await model_class({
      className,
      _class,
      path: `${root}/${title}/${className}.md`,
      name,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/signature-template/sign-off/permission/index.js
/* harmony default export */ const sign_off_permission = (async ({
  permission,
  path,
}) => {
  const attributes = get_attributes(permission.elements)
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `| ${(attributes.DomainOrComputer || '').padEnd(20)} | ${attributes.UserOrGroup.padEnd(20)} | ${attributes.UserIsGroup.padEnd(10)} |\n`,
  )
});

;// CONCATENATED MODULE: ./src/model/signature-template/sign-off/permission/permissions.js
/* harmony default export */ const permissions = (async ({
  signOff,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `| ${'Domain/Computer'.padEnd(20)} | ${'UserOrGroup'.padEnd(20)} | ${'UserIsGroup'.padEnd(10)} |\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `|${'-'.repeat(22)}|${'-'.repeat(22)}|${'-'.repeat(12)}|\n`,
  )

  for (const permission of lib_elements(signOff, 'Permission')) {
    await sign_off_permission({
      permission,
      path,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/signature-template/sign-off/index.js
/* harmony default export */ const sign_off = (async ({
  signOff,
  path,
}) => {
  const signOffIndex = texts(signOff, 'Index')[0]
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `#### ${signOffIndex}\n\n`,
  )

  await write_attributes(signOff, path)

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n##### permissions\n\n`,
  )

  await permissions({
    signOff,
    path,
  })
});

;// CONCATENATED MODULE: ./src/model/signature-template/sign-off/sign-offs.js
/* harmony default export */ const sign_offs = (async ({
  signatureTemplate,
  path,
}) => {
  for (const signOff of lib_elements(signatureTemplate, 'Signoff')) {
    await sign_off({
      signOff,
      path,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/signature-template/index.js
/* harmony default export */ const signature_template = (async ({
  signatureTemplate,
  signatureTemplateName,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `# ${signatureTemplateName}\n\n**signature-template**\n`,
  )

  await write_attributes(signatureTemplate, path)

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### sign offs\n\n`,
  )

  await sign_offs({
    signatureTemplate,
    path,
  })
});

;// CONCATENATED MODULE: ./src/model/signature-template/signature-templates.js
/* harmony default export */ const signature_templates = (async ({
  areaModel,
  root,
}) => {
  const path = await read_me({
    title: 'signature templates',
    root: `${root}/signature-templates`,
  })

  for (
    const signatureTemplate of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'SignatureTemplate',
    )
  ) {
    const signatureTemplateName = texts(signatureTemplate, 'Name')[0]
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `* [${signatureTemplateName}](./${signatureTemplateName}.md)\n`,
    )

    await signature_template({
      signatureTemplateName,
      signatureTemplate,
      path: `${root}/signature-templates/${signatureTemplateName}.md`,
    })
  }
});

;// CONCATENATED MODULE: ./src/model/command-verification-policy/command-verification-policies.js
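// Writes a command / signature-template table from the CommandVerificationPolicies element of the area model.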
/* harmony default export */ const command_verification_policies = (async ({
  areaModel,
  root,
}) => {
  const path = await read_me({
    title: 'command verification policies',
    root: `${root}/command-verification-policies`,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `| ${'command'.padEnd(40)} | ${'Signature Template'.padEnd(20)} |\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `|${'-'.repeat(42)}|${'-'.repeat(22)}|\n`,
  )

  for (
    const commandVerificationPolicy of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'CommandVerificationPolicies',
    )[0].elements
  ) {
    await external_fs_namespaceObject.promises.appendFile(
      path,
      `|${commandVerificationPolicy.name.padEnd(40)}| ${(texts(commandVerificationPolicy, 'SignatureTemplateName')[0] || '').padEnd(20)} |\n`,
    )
  }
});

;// CONCATENATED MODULE: ./src/model/recipe-approvals/recipe-approvals-process.js
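// Writes the recipe approvals process table (name, id, step order, required, approve/revert templates, description), ordered by StepOrder.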
/* harmony default export */ const recipe_approvals_process = (async ({
  areaModel,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### recipe approvals process\n\n`,
  )

  // table header row
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'name'.padEnd(40)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'id'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'step order'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'required'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'approve template'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'revert template'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'description'.padEnd(10)}|\n`)

  // table separator row
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'-'.repeat(40)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}\n`)

  // one row per approval step, ordered by StepOrder
  for (
    const r of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'RecipeApprovalsProcess',
    )[0]
      .elements.map(x => x)
      .sort(
        (a, b) => Number(texts(a, 'StepOrder')[0]) - Number(texts(b, 'StepOrder')[0]),
      )
      .map(
        ({ elements }) => get_attributes(elements),
      )
  ) {
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.Name.padEnd(40)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.Id.padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.StepOrder.padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.Required.padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${(r.ApproveSignatureTemplateName || '').padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${(r.RevertSignatureTemplateName || '').padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${(r.Description || '').padEnd(10)}|\n`)
  }
});

;// CONCATENATED MODULE: ./src/model/recipe-approvals/expedited-approvals-process.js
/* harmony default export */ const expedited_approvals_process = (async ({
  areaModel,
  root,
  path,
}) => {
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### expedited approvals process\n\n`,
  )

  // table header row
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'name'.padEnd(40)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'id'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'step order'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'required'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'approve template'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'revert template'.padEnd(10)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'description'.padEnd(10)}|\n`)

  // table separator row
  await external_fs_namespaceObject.promises.appendFile(path, `| ${'-'.repeat(40)} `)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}`)
  await external_fs_namespaceObject.promises.appendFile(path, `|${'-'.repeat(10)}\n`)

  // one row per expedited approval step, ordered by StepOrder
  for (
    const r of lib_elements(
      lib_elements(areaModel, 'AreaModel')[0],
      'ExpeditedApprovalsProcess',
    )[0]
      .elements.map(x => x)
      .sort(
        (a, b) => Number(texts(a, 'StepOrder')[0]) - Number(texts(b, 'StepOrder')[0]),
      )
      .map(
        ({ elements }) => get_attributes(elements),
      )
  ) {
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.Name.padEnd(40)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.Id.padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.StepOrder.padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${r.Required.padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${(r.ApproveSignatureTemplateName || '').padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${(r.RevertSignatureTemplateName || '').padEnd(10)} `)
    await external_fs_namespaceObject.promises.appendFile(path, `| ${(r.Description || '').padEnd(10)}|\n`)
  }
});

;// CONCATENATED MODULE: ./src/model/recipe-approvals/recipe-approvals.js
/* harmony default export */ const recipe_approvals = (async ({
  areaModel,
  root,
}) => {
  const path = await read_me({
    title: 'recipe approvals',
    root: `${root}/recipe-approvals`,
  })

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### recipe approvals process configured\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `${texts(
      lib_elements(areaModel, 'AreaModel')[0],
      'RecipeApprovalProcessConfigured',
    )[0]}\n`,
  )

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### enable recipe versioning\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `${texts(
      lib_elements(areaModel, 'AreaModel')[0],
      'EnableRecipeVersioning',
    )[0]}\n`,
  )

  await external_fs_namespaceObject.promises.appendFile(
    path,
    `\n### security authority identifier\n\n`,
  )
  await external_fs_namespaceObject.promises.appendFile(
    path,
    `${texts(
      lib_elements(areaModel, 'AreaModel')[0],
      'SecurityAuthorityIdentifier',
    )[0]}\n`,
  )

  await recipe_approvals_process({
    areaModel,
    root: `${root}/recipe-approvals`,
    path,
  })
  await expedited_approvals_process({
    areaModel,
    root: `${root}/recipe-approvals`,
    path,
  })
});

;// CONCATENATED MODULE: ./src/model/index.js
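// Top-level driver: parses the area model XML (and optional OPC tag export), then generates the README
// plus recipe-phase, area, enumeration-set, server, class, signature-template,
// command-verification-policy and recipe-approval documentation.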
const model = async ({
  fileName,
  opcFile,
  root,
}) => {
  const xml = await external_fs_namespaceObject.promises.readFile(fileName, 'utf8')
  const opc = opcFile
    && JSON.parse(
      (
        await external_fs_namespaceObject.promises.readFile(opcFile, 'utf8')
      ).replace(/^\uFEFF/, ''),
    )

  const areaModel = JSON.parse(
    lib_default().xml2json(xml, { compact: false, spaces: 2 }),
  )

  const path = await read_me({
    title: 'batch documentation',
    root,
  })

  await external_fs_namespaceObject.promises.appendFile(
    `${root}/README.md`,
    `## area model\n\n`,
  )

  await write_attributes(areaModel.elements[0], path)

  await recipe_phases({ areaModel, opc, root })

  await external_fs_namespaceObject.promises.appendFile(path, `* [areas](./areas)\n`)
  await areas({ areaModel, opc, root })

  await external_fs_namespaceObject.promises.appendFile(path, `* [recipe phases](./recipe-phases)\n`)

  await external_fs_namespaceObject.promises.appendFile(path, `* [enumeration sets](./enumeration-sets)\n`)
  await enumeration_sets({ areaModel, opc, root })

  await external_fs_namespaceObject.promises.appendFile(path, `* [servers](./servers)\n`)
  await servers({ areaModel, opc, root })

  await classes({
    path,
    areaModel,
    root,
    title: 'process-cell-classes',
    elementName: 'ProcessCellClass',
    name: 'process cell class',
  })
  await classes({
    path,
    areaModel,
    root,
    title: 'unit-classes',
    elementName: 'UnitClass',
    name: 'unit class',
  })
  await classes({
    path,
    areaModel,
    root,
    title: 'tag-classes',
    elementName: 'TagClass',
    name: 'tag class',
  })

  await external_fs_namespaceObject.promises.appendFile(path, `* [signature-templates](./signature-templates)\n`)
  await signature_templates({ areaModel, opc, root })

  await external_fs_namespaceObject.promises.appendFile(path, `* [command-verification-policies](./command-verification-policies)\n`)
  await command_verification_policies({ areaModel, root })

  await external_fs_namespaceObject.promises.appendFile(path, `* [recipe-approvals](./recipe-approvals)\n`)
  await recipe_approvals({ areaModel, root })

  return areaModel;
}

/* harmony default export */ const src_model = (async (
  root,
  equipmentModel,
  kepware,
) => model({
  fileName: equipmentModel,
  opcFile: kepware,
  root,
}));

;// CONCATENATED MODULE: ./main.js
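// Action entry point. main_core appears to be the @actions/core input helper (an assumption based on
// getInput usage); document/model/opc/recipes inputs fall back to defaults, and recipe() is expected
// to be defined earlier in the bundle.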
const main_core = __nccwpck_require__(396);

src_model(
  main_core.getInput('document') || 'document',
  main_core.getInput('model') || 'equipment-model.axml',
  main_core.getInput('opc') || 'equipment-model.json',
).then(
  () => recipe(
    main_core.getInput('document') || 'document',
    main_core.getInput('recipes') || 'recipes',
    main_core.getInput('model') || 'equipment-model.axml',
  ),
);

})();

module.exports = __webpack_exports__;
/******/ })()
;