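// Minified bundle: the marked Markdown parser (see https://marked.js.org) followed by the reveal.js speaker notes plugin.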
function t ( ) {
return {
baseUrl : null ,
breaks : ! 1 ,
extensions : null ,
gfm : ! 0 ,
headerIds : ! 0 ,
headerPrefix : "" ,
highlight : null ,
langPrefix : "language-" ,
mangle : ! 0 ,
pedantic : ! 1 ,
renderer : null ,
sanitize : ! 1 ,
sanitizer : null ,
silent : ! 1 ,
smartLists : ! 1 ,
smartypants : ! 1 ,
tokenizer : null ,
walkTokens : null ,
xhtml : ! 1 ,
} ;
}
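// e: the active option set (marked's defaults); P.setOptions() near the bottom of the file merges new options into it.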
let e = {
baseUrl : null ,
breaks : ! 1 ,
extensions : null ,
gfm : ! 0 ,
headerIds : ! 0 ,
headerPrefix : "" ,
highlight : null ,
langPrefix : "language-" ,
mangle : ! 0 ,
pedantic : ! 1 ,
renderer : null ,
sanitize : ! 1 ,
sanitizer : null ,
silent : ! 1 ,
smartLists : ! 1 ,
smartypants : ! 1 ,
tokenizer : null ,
walkTokens : null ,
xhtml : ! 1 ,
} ;
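// HTML-escaping helpers: test/replace regexes, the character-to-entity map a, and the escape function o() below.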
const n = /[&<>"']/ ,
i = /[&<>"']/g ,
s = /[<>"']|&(?!#?\w+;)/ ,
r = /[<>"']|&(?!#?\w+;)/g ,
a = { "&" : "&" , "<" : "<" , ">" : ">" , '"' : """ , "'" : "'" } ,
l = ( t ) => a [ t ] ;
function o ( t , e ) {
if ( e ) {
if ( n . test ( t ) ) return t . replace ( i , l ) ;
} else if ( s . test ( t ) ) return t . replace ( r , l ) ;
return t ;
}
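// p(): decodes numeric character references (and &colon;) back to characters; other named entities are dropped.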
const c = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/gi ;
function p ( t ) {
return t . replace ( c , ( t , e ) =>
"colon" === ( e = e . toLowerCase ( ) )
? ":"
: "#" === e . charAt ( 0 )
? "x" === e . charAt ( 1 )
? String . fromCharCode ( parseInt ( e . substring ( 2 ) , 16 ) )
: String . fromCharCode ( + e . substring ( 1 ) )
: ""
) ;
}
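// d(): regex builder; replace() substitutes named placeholders into the pattern, getRegex() compiles the result.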
const u = /(^|[^\[])\^/g ;
function d ( t , e ) {
( t = t . source || t ) , ( e = e || "" ) ;
const n = {
replace : ( e , i ) => (
( i = ( i = i . source || i ) . replace ( u , "$1" ) ) , ( t = t . replace ( e , i ) ) , n
) ,
getRegex : ( ) => new RegExp ( t , e ) ,
} ;
return n ;
}
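// m(): URL cleaner; rejects javascript:/vbscript:/data: URLs when sanitizing and resolves relative hrefs against a base URL.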
const h = /[^\w:]/g ,
g = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i ;
function m ( t , e , n ) {
if ( t ) {
let t ;
try {
t = decodeURIComponent ( p ( n ) ) . replace ( h , "" ) . toLowerCase ( ) ;
} catch ( t ) {
return null ;
}
if (
0 === t . indexOf ( "javascript:" ) ||
0 === t . indexOf ( "vbscript:" ) ||
0 === t . indexOf ( "data:" )
)
return null ;
}
e &&
! g . test ( n ) &&
( n = ( function ( t , e ) {
f [ " " + t ] ||
( k . test ( t ) ? ( f [ " " + t ] = t + "/" ) : ( f [ " " + t ] = S ( t , "/" , ! 0 ) ) ) ;
t = f [ " " + t ] ;
const n = - 1 === t . indexOf ( ":" ) ;
return "//" === e . substring ( 0 , 2 )
? n
? e
: t . replace ( w , "$1" ) + e
: "/" === e . charAt ( 0 )
? n
? e
: t . replace ( x , "$1" ) + e
: t + e ;
} ) ( e , n ) ) ;
try {
n = encodeURI ( n ) . replace ( /%25/g , "%" ) ;
} catch ( t ) {
return null ;
}
return n ;
}
const f = { } ,
k = /^[^:]+:\/*[^/]*$/ ,
w = /^([^:]+:)[\s\S]*$/ ,
x = /^([^:]+:\/*[^/]*)[\s\S]*$/ ;
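// b: a no-op "regex" whose exec() never matches; used to disable individual grammar rules.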
const b = { exec : function ( ) { } } ;
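// y(): shallow-merges the own properties of the remaining arguments into the first object (Object.assign-style).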
function y ( t ) {
let e ,
n ,
i = 1 ;
for ( ; i < arguments . length ; i ++ )
for ( n in ( ( e = arguments [ i ] ) , e ) )
Object . prototype . hasOwnProperty . call ( e , n ) && ( t [ n ] = e [ n ] ) ;
return t ;
}
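// v(): splits a table row into cells on unescaped pipes, trimming each cell and padding/truncating to the expected count.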
function v ( t , e ) {
const n = t
. replace ( /\|/g , ( t , e , n ) => {
let i = ! 1 ,
s = e ;
for ( ; -- s >= 0 && "\\" === n [ s ] ; ) i = ! i ;
return i ? "|" : " |" ;
} )
. split ( / \|/ ) ;
let i = 0 ;
if (
( n [ 0 ] . trim ( ) || n . shift ( ) ,
n . length > 0 && ! n [ n . length - 1 ] . trim ( ) && n . pop ( ) ,
n . length > e )
)
n . splice ( e ) ;
else for ( ; n . length < e ; ) n . push ( "" ) ;
for ( ; i < n . length ; i ++ ) n [ i ] = n [ i ] . trim ( ) . replace ( /\\\|/g , "|" ) ;
return n ;
}
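// S(): trims trailing occurrences of a character from a string; with the third argument set, trims everything except that character.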
function S ( t , e , n ) {
const i = t . length ;
if ( 0 === i ) return "" ;
let s = 0 ;
for ( ; s < i ; ) {
const r = t . charAt ( i - s - 1 ) ;
if ( r !== e || n ) {
if ( r === e || ! n ) break ;
s ++ ;
} else s ++ ;
}
return t . substr ( 0 , i - s ) ;
}
function T ( t ) {
t &&
t . sanitize &&
! t . silent &&
console . warn (
"marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options"
) ;
}
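// _(): repeats a string e times using binary exponentiation.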
function _ ( t , e ) {
if ( e < 1 ) return "" ;
let n = "" ;
for ( ; e > 1 ; ) 1 & e && ( n += t ) , ( e >>= 1 ) , ( t += t ) ;
return n + t ;
}
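// z(): builds a link or image token from a regex match and a { href, title } definition.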
function z ( t , e , n , i ) {
const s = e . href ,
r = e . title ? o ( e . title ) : null ,
a = t [ 1 ] . replace ( /\\([\[\]])/g , "$1" ) ;
if ( "!" !== t [ 0 ] . charAt ( 0 ) ) {
i . state . inLink = ! 0 ;
const t = {
type : "link" ,
raw : n ,
href : s ,
title : r ,
text : a ,
tokens : i . inlineTokens ( a , [ ] ) ,
} ;
return ( i . state . inLink = ! 1 ) , t ;
}
return { type : "image" , raw : n , href : s , title : r , text : o ( a ) } ;
}
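// A: the Tokenizer (exposed as P.Tokenizer). Each method tries to match one Markdown construct at the start of the input and returns a token or undefined.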
class A {
constructor ( t ) {
this . options = t || e ;
}
space ( t ) {
const e = this . rules . block . newline . exec ( t ) ;
if ( e && e [ 0 ] . length > 0 ) return { type : "space" , raw : e [ 0 ] } ;
}
code ( t ) {
const e = this . rules . block . code . exec ( t ) ;
if ( e ) {
const t = e [ 0 ] . replace ( /^ {1,4}/gm , "" ) ;
return {
type : "code" ,
raw : e [ 0 ] ,
codeBlockStyle : "indented" ,
text : this . options . pedantic ? t : S ( t , "\n" ) ,
} ;
}
}
fences ( t ) {
const e = this . rules . block . fences . exec ( t ) ;
if ( e ) {
const t = e [ 0 ] ,
n = ( function ( t , e ) {
const n = t . match ( /^(\s+)(?:```)/ ) ;
if ( null === n ) return e ;
const i = n [ 1 ] ;
return e
. split ( "\n" )
. map ( ( t ) => {
const e = t . match ( /^\s+/ ) ;
if ( null === e ) return t ;
const [ n ] = e ;
return n . length >= i . length ? t . slice ( i . length ) : t ;
} )
. join ( "\n" ) ;
} ) ( t , e [ 3 ] || "" ) ;
return { type : "code" , raw : t , lang : e [ 2 ] ? e [ 2 ] . trim ( ) : e [ 2 ] , text : n } ;
}
}
heading ( t ) {
const e = this . rules . block . heading . exec ( t ) ;
if ( e ) {
let t = e [ 2 ] . trim ( ) ;
if ( /#$/ . test ( t ) ) {
const e = S ( t , "#" ) ;
this . options . pedantic
? ( t = e . trim ( ) )
: ( e && ! / $/ . test ( e ) ) || ( t = e . trim ( ) ) ;
}
const n = {
type : "heading" ,
raw : e [ 0 ] ,
depth : e [ 1 ] . length ,
text : t ,
tokens : [ ] ,
} ;
return this . lexer . inline ( n . text , n . tokens ) , n ;
}
}
hr ( t ) {
const e = this . rules . block . hr . exec ( t ) ;
if ( e ) return { type : "hr" , raw : e [ 0 ] } ;
}
blockquote ( t ) {
const e = this . rules . block . blockquote . exec ( t ) ;
if ( e ) {
const t = e [ 0 ] . replace ( /^ *> ?/gm , "" ) ;
return {
type : "blockquote" ,
raw : e [ 0 ] ,
tokens : this . lexer . blockTokens ( t , [ ] ) ,
text : t ,
} ;
}
}
list ( t ) {
let e = this . rules . block . list . exec ( t ) ;
if ( e ) {
let n ,
i ,
s ,
r ,
a ,
l ,
o ,
c ,
p ,
u ,
d ,
h ,
g = e [ 1 ] . trim ( ) ;
const m = g . length > 1 ,
f = {
type : "list" ,
raw : "" ,
ordered : m ,
start : m ? + g . slice ( 0 , - 1 ) : "" ,
loose : ! 1 ,
items : [ ] ,
} ;
( g = m ? `\\d{1,9}\\${g.slice(-1)}` : `\\${g}` ) ,
this . options . pedantic && ( g = m ? g : "[*+-]" ) ;
const k = new RegExp ( `^( {0,3}${g})((?: [^\\n]*)?(?:\\n|$))` ) ;
for (
;
t && ( ( h = ! 1 ) , ( e = k . exec ( t ) ) ) && ! this . rules . block . hr . test ( t ) ;
) {
if (
( ( n = e [ 0 ] ) ,
( t = t . substring ( n . length ) ) ,
( c = e [ 2 ] . split ( "\n" , 1 ) [ 0 ] ) ,
( p = t . split ( "\n" , 1 ) [ 0 ] ) ,
this . options . pedantic
? ( ( r = 2 ) , ( d = c . trimLeft ( ) ) )
: ( ( r = e [ 2 ] . search ( /[^ ]/ ) ) ,
( r = r > 4 ? 1 : r ) ,
( d = c . slice ( r ) ) ,
( r += e [ 1 ] . length ) ) ,
( l = ! 1 ) ,
! c &&
/^ *$/ . test ( p ) &&
( ( n += p + "\n" ) , ( t = t . substring ( p . length + 1 ) ) , ( h = ! 0 ) ) ,
! h )
) {
const e = new RegExp ( `^ {0,${Math.min(3, r - 1)}}(?:[*+-]|\\d{1,9}[.)])` ) ;
for (
;
t &&
( ( u = t . split ( "\n" , 1 ) [ 0 ] ) ,
( c = u ) ,
this . options . pedantic &&
( c = c . replace ( /^ {1,4}(?=( {4})*[^ ])/g , " " ) ) ,
! e . test ( c ) ) ;
) {
if ( c . search ( /[^ ]/ ) >= r || ! c . trim ( ) ) d += "\n" + c . slice ( r ) ;
else {
if ( l ) break ;
d += "\n" + c ;
}
l || c . trim ( ) || ( l = ! 0 ) ,
( n += u + "\n" ) ,
( t = t . substring ( u . length + 1 ) ) ;
}
}
f . loose || ( o ? ( f . loose = ! 0 ) : /\n *\n *$/ . test ( n ) && ( o = ! 0 ) ) ,
this . options . gfm &&
( ( i = /^\[[ xX]\] / . exec ( d ) ) ,
i && ( ( s = "[ ] " !== i [ 0 ] ) , ( d = d . replace ( /^\[[ xX]\] +/ , "" ) ) ) ) ,
f . items . push ( {
type : "list_item" ,
raw : n ,
task : ! ! i ,
checked : s ,
loose : ! 1 ,
text : d ,
} ) ,
( f . raw += n ) ;
}
( f . items [ f . items . length - 1 ] . raw = n . trimRight ( ) ) ,
( f . items [ f . items . length - 1 ] . text = d . trimRight ( ) ) ,
( f . raw = f . raw . trimRight ( ) ) ;
const w = f . items . length ;
for ( a = 0 ; a < w ; a ++ ) {
( this . lexer . state . top = ! 1 ) ,
( f . items [ a ] . tokens = this . lexer . blockTokens ( f . items [ a ] . text , [ ] ) ) ;
const t = f . items [ a ] . tokens . filter ( ( t ) => "space" === t . type ) ,
e = t . every ( ( t ) => {
const e = t . raw . split ( "" ) ;
let n = 0 ;
for ( const t of e ) if ( ( "\n" === t && ( n += 1 ) , n > 1 ) ) return ! 0 ;
return ! 1 ;
} ) ;
! f . loose && t . length && e && ( ( f . loose = ! 0 ) , ( f . items [ a ] . loose = ! 0 ) ) ;
}
return f ;
}
}
html ( t ) {
const e = this . rules . block . html . exec ( t ) ;
if ( e ) {
const t = {
type : "html" ,
raw : e [ 0 ] ,
pre :
! this . options . sanitizer &&
( "pre" === e [ 1 ] || "script" === e [ 1 ] || "style" === e [ 1 ] ) ,
text : e [ 0 ] ,
} ;
return (
this . options . sanitize &&
( ( t . type = "paragraph" ) ,
( t . text = this . options . sanitizer
? this . options . sanitizer ( e [ 0 ] )
: o ( e [ 0 ] ) ) ,
( t . tokens = [ ] ) ,
this . lexer . inline ( t . text , t . tokens ) ) ,
t
) ;
}
}
def ( t ) {
const e = this . rules . block . def . exec ( t ) ;
if ( e ) {
e [ 3 ] && ( e [ 3 ] = e [ 3 ] . substring ( 1 , e [ 3 ] . length - 1 ) ) ;
return {
type : "def" ,
tag : e [ 1 ] . toLowerCase ( ) . replace ( /\s+/g , " " ) ,
raw : e [ 0 ] ,
href : e [ 2 ] ,
title : e [ 3 ] ,
} ;
}
}
table ( t ) {
const e = this . rules . block . table . exec ( t ) ;
if ( e ) {
const t = {
type : "table" ,
header : v ( e [ 1 ] ) . map ( ( t ) => ( { text : t } ) ) ,
align : e [ 2 ] . replace ( /^ *|\| *$/g , "" ) . split ( / *\| */ ) ,
rows :
e [ 3 ] && e [ 3 ] . trim ( ) ? e [ 3 ] . replace ( /\n[ \t]*$/ , "" ) . split ( "\n" ) : [ ] ,
} ;
if ( t . header . length === t . align . length ) {
t . raw = e [ 0 ] ;
let n ,
i ,
s ,
r ,
a = t . align . length ;
for ( n = 0 ; n < a ; n ++ )
/^ *-+: *$/ . test ( t . align [ n ] )
? ( t . align [ n ] = "right" )
: /^ *:-+: *$/ . test ( t . align [ n ] )
? ( t . align [ n ] = "center" )
: /^ *:-+ *$/ . test ( t . align [ n ] )
? ( t . align [ n ] = "left" )
: ( t . align [ n ] = null ) ;
for ( a = t . rows . length , n = 0 ; n < a ; n ++ )
t . rows [ n ] = v ( t . rows [ n ] , t . header . length ) . map ( ( t ) => ( { text : t } ) ) ;
for ( a = t . header . length , i = 0 ; i < a ; i ++ )
( t . header [ i ] . tokens = [ ] ) ,
this . lexer . inlineTokens ( t . header [ i ] . text , t . header [ i ] . tokens ) ;
for ( a = t . rows . length , i = 0 ; i < a ; i ++ )
for ( r = t . rows [ i ] , s = 0 ; s < r . length ; s ++ )
( r [ s ] . tokens = [ ] ) , this . lexer . inlineTokens ( r [ s ] . text , r [ s ] . tokens ) ;
return t ;
}
}
}
lheading ( t ) {
const e = this . rules . block . lheading . exec ( t ) ;
if ( e ) {
const t = {
type : "heading" ,
raw : e [ 0 ] ,
depth : "=" === e [ 2 ] . charAt ( 0 ) ? 1 : 2 ,
text : e [ 1 ] ,
tokens : [ ] ,
} ;
return this . lexer . inline ( t . text , t . tokens ) , t ;
}
}
paragraph ( t ) {
const e = this . rules . block . paragraph . exec ( t ) ;
if ( e ) {
const t = {
type : "paragraph" ,
raw : e [ 0 ] ,
text : "\n" === e [ 1 ] . charAt ( e [ 1 ] . length - 1 ) ? e [ 1 ] . slice ( 0 , - 1 ) : e [ 1 ] ,
tokens : [ ] ,
} ;
return this . lexer . inline ( t . text , t . tokens ) , t ;
}
}
text ( t ) {
const e = this . rules . block . text . exec ( t ) ;
if ( e ) {
const t = { type : "text" , raw : e [ 0 ] , text : e [ 0 ] , tokens : [ ] } ;
return this . lexer . inline ( t . text , t . tokens ) , t ;
}
}
escape ( t ) {
const e = this . rules . inline . escape . exec ( t ) ;
if ( e ) return { type : "escape" , raw : e [ 0 ] , text : o ( e [ 1 ] ) } ;
}
tag ( t ) {
const e = this . rules . inline . tag . exec ( t ) ;
if ( e )
return (
! this . lexer . state . inLink && /^<a /i . test ( e [ 0 ] )
? ( this . lexer . state . inLink = ! 0 )
: this . lexer . state . inLink &&
/^<\/a>/i . test ( e [ 0 ] ) &&
( this . lexer . state . inLink = ! 1 ) ,
! this . lexer . state . inRawBlock &&
/^<(pre|code|kbd|script)(\s|>)/i . test ( e [ 0 ] )
? ( this . lexer . state . inRawBlock = ! 0 )
: this . lexer . state . inRawBlock &&
/^<\/(pre|code|kbd|script)(\s|>)/i . test ( e [ 0 ] ) &&
( this . lexer . state . inRawBlock = ! 1 ) ,
{
type : this . options . sanitize ? "text" : "html" ,
raw : e [ 0 ] ,
inLink : this . lexer . state . inLink ,
inRawBlock : this . lexer . state . inRawBlock ,
text : this . options . sanitize
? this . options . sanitizer
? this . options . sanitizer ( e [ 0 ] )
: o ( e [ 0 ] )
: e [ 0 ] ,
}
) ;
}
link ( t ) {
const e = this . rules . inline . link . exec ( t ) ;
if ( e ) {
const t = e [ 2 ] . trim ( ) ;
if ( ! this . options . pedantic && /^</ . test ( t ) ) {
if ( ! />$/ . test ( t ) ) return ;
const e = S ( t . slice ( 0 , - 1 ) , "\\" ) ;
if ( ( t . length - e . length ) % 2 == 0 ) return ;
} else {
const t = ( function ( t , e ) {
if ( - 1 === t . indexOf ( e [ 1 ] ) ) return - 1 ;
const n = t . length ;
let i = 0 ,
s = 0 ;
for ( ; s < n ; s ++ )
if ( "\\" === t [ s ] ) s ++ ;
else if ( t [ s ] === e [ 0 ] ) i ++ ;
else if ( t [ s ] === e [ 1 ] && ( i -- , i < 0 ) ) return s ;
return - 1 ;
} ) ( e [ 2 ] , "()" ) ;
if ( t > - 1 ) {
const n = ( 0 === e [ 0 ] . indexOf ( "!" ) ? 5 : 4 ) + e [ 1 ] . length + t ;
( e [ 2 ] = e [ 2 ] . substring ( 0 , t ) ) ,
( e [ 0 ] = e [ 0 ] . substring ( 0 , n ) . trim ( ) ) ,
( e [ 3 ] = "" ) ;
}
}
let n = e [ 2 ] ,
i = "" ;
if ( this . options . pedantic ) {
const t = /^([^'"]*[^\s])\s+(['"])(.*)\2/ . exec ( n ) ;
t && ( ( n = t [ 1 ] ) , ( i = t [ 3 ] ) ) ;
} else i = e [ 3 ] ? e [ 3 ] . slice ( 1 , - 1 ) : "" ;
return (
( n = n . trim ( ) ) ,
/^</ . test ( n ) &&
( n =
this . options . pedantic && ! />$/ . test ( t )
? n . slice ( 1 )
: n . slice ( 1 , - 1 ) ) ,
z (
e ,
{
href : n ? n . replace ( this . rules . inline . _escapes , "$1" ) : n ,
title : i ? i . replace ( this . rules . inline . _escapes , "$1" ) : i ,
} ,
e [ 0 ] ,
this . lexer
)
) ;
}
}
reflink ( t , e ) {
let n ;
if (
( n = this . rules . inline . reflink . exec ( t ) ) ||
( n = this . rules . inline . nolink . exec ( t ) )
) {
let t = ( n [ 2 ] || n [ 1 ] ) . replace ( /\s+/g , " " ) ;
if ( ( ( t = e [ t . toLowerCase ( ) ] ) , ! t || ! t . href ) ) {
const t = n [ 0 ] . charAt ( 0 ) ;
return { type : "text" , raw : t , text : t } ;
}
return z ( n , t , n [ 0 ] , this . lexer ) ;
}
}
emStrong ( t , e , n = "" ) {
let i = this . rules . inline . emStrong . lDelim . exec ( t ) ;
if ( ! i ) return ;
if ( i [ 3 ] && n . match ( /[\p{L}\p{N}]/u ) ) return ;
const s = i [ 1 ] || i [ 2 ] || "" ;
if ( ! s || ( s && ( "" === n || this . rules . inline . punctuation . exec ( n ) ) ) ) {
const n = i [ 0 ] . length - 1 ;
let s ,
r ,
a = n ,
l = 0 ;
const o =
"*" === i [ 0 ] [ 0 ]
? this . rules . inline . emStrong . rDelimAst
: this . rules . inline . emStrong . rDelimUnd ;
for (
o . lastIndex = 0 , e = e . slice ( - 1 * t . length + n ) ;
null != ( i = o . exec ( e ) ) ;
) {
if ( ( ( s = i [ 1 ] || i [ 2 ] || i [ 3 ] || i [ 4 ] || i [ 5 ] || i [ 6 ] ) , ! s ) ) continue ;
if ( ( ( r = s . length ) , i [ 3 ] || i [ 4 ] ) ) {
a += r ;
continue ;
}
if ( ( i [ 5 ] || i [ 6 ] ) && n % 3 && ! ( ( n + r ) % 3 ) ) {
l += r ;
continue ;
}
if ( ( ( a -= r ) , a > 0 ) ) continue ;
if ( ( ( r = Math . min ( r , r + a + l ) ) , Math . min ( n , r ) % 2 ) ) {
const e = t . slice ( 1 , n + i . index + r ) ;
return {
type : "em" ,
raw : t . slice ( 0 , n + i . index + r + 1 ) ,
text : e ,
tokens : this . lexer . inlineTokens ( e , [ ] ) ,
} ;
}
const e = t . slice ( 2 , n + i . index + r - 1 ) ;
return {
type : "strong" ,
raw : t . slice ( 0 , n + i . index + r + 1 ) ,
text : e ,
tokens : this . lexer . inlineTokens ( e , [ ] ) ,
} ;
}
}
}
codespan ( t ) {
const e = this . rules . inline . code . exec ( t ) ;
if ( e ) {
let t = e [ 2 ] . replace ( /\n/g , " " ) ;
const n = /[^ ]/ . test ( t ) ,
i = /^ / . test ( t ) && / $/ . test ( t ) ;
return (
n && i && ( t = t . substring ( 1 , t . length - 1 ) ) ,
( t = o ( t , ! 0 ) ) ,
{ type : "codespan" , raw : e [ 0 ] , text : t }
) ;
}
}
br ( t ) {
const e = this . rules . inline . br . exec ( t ) ;
if ( e ) return { type : "br" , raw : e [ 0 ] } ;
}
del ( t ) {
const e = this . rules . inline . del . exec ( t ) ;
if ( e )
return {
type : "del" ,
raw : e [ 0 ] ,
text : e [ 2 ] ,
tokens : this . lexer . inlineTokens ( e [ 2 ] , [ ] ) ,
} ;
}
autolink ( t , e ) {
const n = this . rules . inline . autolink . exec ( t ) ;
if ( n ) {
let t , i ;
return (
"@" === n [ 2 ]
? ( ( t = o ( this . options . mangle ? e ( n [ 1 ] ) : n [ 1 ] ) ) , ( i = "mailto:" + t ) )
: ( ( t = o ( n [ 1 ] ) ) , ( i = t ) ) ,
{
type : "link" ,
raw : n [ 0 ] ,
text : t ,
href : i ,
tokens : [ { type : "text" , raw : t , text : t } ] ,
}
) ;
}
}
url ( t , e ) {
let n ;
if ( ( n = this . rules . inline . url . exec ( t ) ) ) {
let t , i ;
if ( "@" === n [ 2 ] )
( t = o ( this . options . mangle ? e ( n [ 0 ] ) : n [ 0 ] ) ) , ( i = "mailto:" + t ) ;
else {
let e ;
do {
( e = n [ 0 ] ) , ( n [ 0 ] = this . rules . inline . _backpedal . exec ( n [ 0 ] ) [ 0 ] ) ;
} while ( e !== n [ 0 ] ) ;
( t = o ( n [ 0 ] ) ) , ( i = "www." === n [ 1 ] ? "http://" + t : t ) ;
}
return {
type : "link" ,
raw : n [ 0 ] ,
text : t ,
href : i ,
tokens : [ { type : "text" , raw : t , text : t } ] ,
} ;
}
}
inlineText ( t , e ) {
const n = this . rules . inline . text . exec ( t ) ;
if ( n ) {
let t ;
return (
( t = this . lexer . state . inRawBlock
? this . options . sanitize
? this . options . sanitizer
? this . options . sanitizer ( n [ 0 ] )
: o ( n [ 0 ] )
: n [ 0 ]
: o ( this . options . smartypants ? e ( n [ 0 ] ) : n [ 0 ] ) ) ,
{ type : "text" , raw : n [ 0 ] , text : t }
) ;
}
}
}
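// E: block-level grammar; specialized below into normal, gfm, and pedantic rule sets.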
const E = {
newline : /^(?: *(?:\n|$))+/ ,
code : /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/ ,
fences :
/^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/ ,
hr : /^ {0,3}((?:- *){3,}|(?:_ *){3,}|(?:\* *){3,})(?:\n+|$)/ ,
heading : /^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/ ,
blockquote : /^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/ ,
list : /^( {0,3}bull)( [^\n]+?)?(?:\n|$)/ ,
html : "^ {0,3}(?:<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:</\\1>[^\\n]*\\n+|$)|comment[^\\n]*(\\n+|$)|<\\?[\\s\\S]*?(?:\\?>\\n*|$)|<![A-Z][\\s\\S]*?(?:>\\n*|$)|<!\\[CDATA\\[[\\s\\S]*?(?:\\]\\]>\\n*|$)|</?(tag)(?: +|\\n|/?>)[\\s\\S]*?(?:(?:\\n *)+\\n|$)|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)|</(?!script|pre|style|textarea)[a-z][\\w-]*\\s*>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$))" ,
def : /^ {0,3}\[(label)\]: *(?:\n *)?<?([^\s>]+)>?(?:(?: +(?:\n *)?| *\n *)(title))? *(?:\n+|$)/ ,
table : b ,
lheading : /^([^\n]+)\n {0,3}(=+|-+) *(?:\n+|$)/ ,
_paragraph :
/^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html|table| +\n)[^\n]+)*)/ ,
text : /^[^\n]+/ ,
_label : /(?!\s*\])(?:\\.|[^\[\]\\])+/ ,
_title : /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/ ,
} ;
( E . def = d ( E . def )
. replace ( "label" , E . _label )
. replace ( "title" , E . _title )
. getRegex ( ) ) ,
( E . bullet = /(?:[*+-]|\d{1,9}[.)])/ ) ,
( E . listItemStart = d ( /^( *)(bull) */ )
. replace ( "bull" , E . bullet )
. getRegex ( ) ) ,
( E . list = d ( E . list )
. replace ( /bull/g , E . bullet )
. replace (
"hr" ,
"\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))"
)
. replace ( "def" , "\\n+(?=" + E . def . source + ")" )
. getRegex ( ) ) ,
( E . _tag =
"address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul" ) ,
( E . _comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/ ) ,
( E . html = d ( E . html , "i" )
. replace ( "comment" , E . _comment )
. replace ( "tag" , E . _tag )
. replace (
"attribute" ,
/ +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/
)
. getRegex ( ) ) ,
( E . paragraph = d ( E . _paragraph )
. replace ( "hr" , E . hr )
. replace ( "heading" , " {0,3}#{1,6} " )
. replace ( "|lheading" , "" )
. replace ( "|table" , "" )
. replace ( "blockquote" , " {0,3}>" )
. replace ( "fences" , " {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n" )
. replace ( "list" , " {0,3}(?:[*+-]|1[.)]) " )
. replace (
"html" ,
"</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)"
)
. replace ( "tag" , E . _tag )
. getRegex ( ) ) ,
( E . blockquote = d ( E . blockquote ) . replace ( "paragraph" , E . paragraph ) . getRegex ( ) ) ,
( E . normal = y ( { } , E ) ) ,
( E . gfm = y ( { } , E . normal , {
table :
"^ *([^\\n ].*\\|.*)\\n {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)" ,
} ) ) ,
( E . gfm . table = d ( E . gfm . table )
. replace ( "hr" , E . hr )
. replace ( "heading" , " {0,3}#{1,6} " )
. replace ( "blockquote" , " {0,3}>" )
. replace ( "code" , " {4}[^\\n]" )
. replace ( "fences" , " {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n" )
. replace ( "list" , " {0,3}(?:[*+-]|1[.)]) " )
. replace (
"html" ,
"</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)"
)
. replace ( "tag" , E . _tag )
. getRegex ( ) ) ,
( E . gfm . paragraph = d ( E . _paragraph )
. replace ( "hr" , E . hr )
. replace ( "heading" , " {0,3}#{1,6} " )
. replace ( "|lheading" , "" )
. replace ( "table" , E . gfm . table )
. replace ( "blockquote" , " {0,3}>" )
. replace ( "fences" , " {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n" )
. replace ( "list" , " {0,3}(?:[*+-]|1[.)]) " )
. replace (
"html" ,
"</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)"
)
. replace ( "tag" , E . _tag )
. getRegex ( ) ) ,
( E . pedantic = y ( { } , E . normal , {
html : d (
"^ *(?:comment *(?:\\n|\\s*$)|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)|<tag(?:\"[^\"]*\"|'[^']*'|\\s[^'\"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))"
)
. replace ( "comment" , E . _comment )
. replace (
/tag/g ,
"(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:|[^\\w\\s@]*@)\\b"
)
. getRegex ( ) ,
def : /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/ ,
heading : /^(#{1,6})(.*)(?:\n+|$)/ ,
fences : b ,
paragraph : d ( E . normal . _paragraph )
. replace ( "hr" , E . hr )
. replace ( "heading" , " *#{1,6} *[^\n]" )
. replace ( "lheading" , E . lheading )
. replace ( "blockquote" , " {0,3}>" )
. replace ( "|fences" , "" )
. replace ( "|list" , "" )
. replace ( "|html" , "" )
. getRegex ( ) ,
} ) ) ;
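// $: inline-level grammar; specialized below into normal, pedantic, gfm, and breaks rule sets.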
const $ = {
escape : /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/ ,
autolink : /^<(scheme:[^\s\x00-\x1f<>]*|email)>/ ,
url : b ,
tag : "^comment|^</[a-zA-Z][\\w:-]*\\s*>|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>|^<\\?[\\s\\S]*?\\?>|^<![a-zA-Z]+\\s[\\s\\S]*?>|^<!\\[CDATA\\[[\\s\\S]*?\\]\\]>" ,
link : /^!?\[(label)\]\(\s*(href)(?:\s+(title))?\s*\)/ ,
reflink : /^!?\[(label)\]\[(ref)\]/ ,
nolink : /^!?\[(ref)\](?:\[\])?/ ,
reflinkSearch : "reflink|nolink(?!\\()" ,
emStrong : {
lDelim : /^(?:\*+(?:([punct_])|[^\s*]))|^_+(?:([punct*])|([^\s_]))/ ,
rDelimAst :
/^[^_*]*?\_\_[^_*]*?\*[^_*]*?(?=\_\_)|[punct_](\*+)(?=[\s]|$)|[^punct*_\s](\*+)(?=[punct_\s]|$)|[punct_\s](\*+)(?=[^punct*_\s])|[\s](\*+)(?=[punct_])|[punct_](\*+)(?=[punct_])|[^punct*_\s](\*+)(?=[^punct*_\s])/ ,
rDelimUnd :
/^[^_*]*?\*\*[^_*]*?\_[^_*]*?(?=\*\*)|[punct*](\_+)(?=[\s]|$)|[^punct*_\s](\_+)(?=[punct*\s]|$)|[punct*\s](\_+)(?=[^punct*_\s])|[\s](\_+)(?=[punct*])|[punct*](\_+)(?=[punct*])/ ,
} ,
code : /^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/ ,
br : /^( {2,}|\\)\n(?!\s*$)/ ,
del : b ,
text : /^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\<!\[`*_]|\b_|$)|[^ ](?= {2,}\n)))/ ,
punctuation : /^([\spunctuation])/ ,
} ;
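// L(): "smartypants" replacements, turning straight quotes, dashes, and ellipses into typographic characters.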
function L ( t ) {
return t
  . replace ( /---/g , "—" )
  . replace ( /--/g , "–" )
  . replace ( /(^|[-\u2014/(\[{"\s])'/g , "$1‘" )
  . replace ( /'/g , "’" )
  . replace ( /(^|[-\u2014/(\[{\u2018\s])"/g , "$1“" )
  . replace ( /"/g , "”" )
  . replace ( /\.{3}/g , "…" ) ;
}
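// R(): "mangle" for autolinked emails, encoding each character as a decimal or hex character reference.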
function R ( t ) {
let e ,
n ,
i = "" ;
const s = t . length ;
for ( e = 0 ; e < s ; e ++ )
( n = t . charCodeAt ( e ) ) ,
Math . random ( ) > 0.5 && ( n = "x" + n . toString ( 16 ) ) ,
( i += "&#" + n + ";" ) ;
return i ;
}
( $ . _punctuation = "!\"#$%&'()+\\-.,/:;<=>?@\\[\\]`^{|}~" ) ,
( $ . punctuation = d ( $ . punctuation )
. replace ( /punctuation/g , $ . _punctuation )
. getRegex ( ) ) ,
( $ . blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g ) ,
( $ . escapedEmSt = /\\\*|\\_/g ) ,
( $ . _comment = d ( E . _comment ) . replace ( "(?:--\x3e|$)" , "--\x3e" ) . getRegex ( ) ) ,
( $ . emStrong . lDelim = d ( $ . emStrong . lDelim )
. replace ( /punct/g , $ . _punctuation )
. getRegex ( ) ) ,
( $ . emStrong . rDelimAst = d ( $ . emStrong . rDelimAst , "g" )
. replace ( /punct/g , $ . _punctuation )
. getRegex ( ) ) ,
( $ . emStrong . rDelimUnd = d ( $ . emStrong . rDelimUnd , "g" )
. replace ( /punct/g , $ . _punctuation )
. getRegex ( ) ) ,
( $ . _escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g ) ,
( $ . _scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/ ) ,
( $ . _email =
/[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/ ) ,
( $ . autolink = d ( $ . autolink )
. replace ( "scheme" , $ . _scheme )
. replace ( "email" , $ . _email )
. getRegex ( ) ) ,
( $ . _attribute =
/\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/ ) ,
( $ . tag = d ( $ . tag )
. replace ( "comment" , $ . _comment )
. replace ( "attribute" , $ . _attribute )
. getRegex ( ) ) ,
( $ . _label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/ ) ,
( $ . _href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/ ) ,
( $ . _title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/ ) ,
( $ . link = d ( $ . link )
. replace ( "label" , $ . _label )
. replace ( "href" , $ . _href )
. replace ( "title" , $ . _title )
. getRegex ( ) ) ,
( $ . reflink = d ( $ . reflink )
. replace ( "label" , $ . _label )
. replace ( "ref" , E . _label )
. getRegex ( ) ) ,
( $ . nolink = d ( $ . nolink ) . replace ( "ref" , E . _label ) . getRegex ( ) ) ,
( $ . reflinkSearch = d ( $ . reflinkSearch , "g" )
. replace ( "reflink" , $ . reflink )
. replace ( "nolink" , $ . nolink )
. getRegex ( ) ) ,
( $ . normal = y ( { } , $ ) ) ,
( $ . pedantic = y ( { } , $ . normal , {
strong : {
start : /^__|\*\*/ ,
middle : /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/ ,
endAst : /\*\*(?!\*)/g ,
endUnd : /__(?!_)/g ,
} ,
em : {
start : /^_|\*/ ,
middle : /^()\*(?=\S)([\s\S]*?\S)\*(?!\*)|^_(?=\S)([\s\S]*?\S)_(?!_)/ ,
endAst : /\*(?!\*)/g ,
endUnd : /_(?!_)/g ,
} ,
link : d ( /^!?\[(label)\]\((.*?)\)/ )
. replace ( "label" , $ . _label )
. getRegex ( ) ,
reflink : d ( /^!?\[(label)\]\s*\[([^\]]*)\]/ )
. replace ( "label" , $ . _label )
. getRegex ( ) ,
} ) ) ,
( $ . gfm = y ( { } , $ . normal , {
escape : d ( $ . escape ) . replace ( "])" , "~|])" ) . getRegex ( ) ,
_extended_email :
  /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/ ,
url : /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/ ,
_backpedal :
/(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/ ,
del : /^(~~?)(?=[^\s~])([\s\S]*?[^\s~])\1(?=[^~]|$)/ ,
text : /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/ ,
} ) ) ,
( $ . gfm . url = d ( $ . gfm . url , "i" )
. replace ( "email" , $ . gfm . _extended _email )
. getRegex ( ) ) ,
( $ . breaks = y ( { } , $ . gfm , {
br : d ( $ . br ) . replace ( "{2,}" , "*" ) . getRegex ( ) ,
text : d ( $ . gfm . text )
. replace ( "\\b_" , "\\b_| {2,}\\n" )
. replace ( /\{2,\}/g , "*" )
. getRegex ( ) ,
} ) ) ;
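// I: the Lexer (exposed as P.Lexer). Picks the rule set from the options, runs the block pass, then tokenizes the queued inline text.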
class I {
constructor ( t ) {
( this . tokens = [ ] ) ,
( this . tokens . links = Object . create ( null ) ) ,
( this . options = t || e ) ,
( this . options . tokenizer = this . options . tokenizer || new A ( ) ) ,
( this . tokenizer = this . options . tokenizer ) ,
( this . tokenizer . options = this . options ) ,
( this . tokenizer . lexer = this ) ,
( this . inlineQueue = [ ] ) ,
( this . state = { inLink : ! 1 , inRawBlock : ! 1 , top : ! 0 } ) ;
const n = { block : E . normal , inline : $ . normal } ;
this . options . pedantic
? ( ( n . block = E . pedantic ) , ( n . inline = $ . pedantic ) )
: this . options . gfm &&
( ( n . block = E . gfm ) ,
this . options . breaks ? ( n . inline = $ . breaks ) : ( n . inline = $ . gfm ) ) ,
( this . tokenizer . rules = n ) ;
}
static get rules ( ) {
return { block : E , inline : $ } ;
}
static lex ( t , e ) {
return new I ( e ) . lex ( t ) ;
}
static lexInline ( t , e ) {
return new I ( e ) . inlineTokens ( t ) ;
}
lex ( t ) {
let e ;
for (
t = t . replace ( /\r\n|\r/g , "\n" ) . replace ( /\t/g , " " ) ,
this . blockTokens ( t , this . tokens ) ;
( e = this . inlineQueue . shift ( ) ) ;
)
this . inlineTokens ( e . src , e . tokens ) ;
return this . tokens ;
}
blockTokens ( t , e = [ ] ) {
let n , i , s , r ;
for ( this . options . pedantic && ( t = t . replace ( /^ +$/gm , "" ) ) ; t ; )
if (
! (
this . options . extensions &&
this . options . extensions . block &&
this . options . extensions . block . some (
( i ) =>
! ! ( n = i . call ( { lexer : this } , t , e ) ) &&
( ( t = t . substring ( n . raw . length ) ) , e . push ( n ) , ! 0 )
)
)
)
if ( ( n = this . tokenizer . space ( t ) ) )
( t = t . substring ( n . raw . length ) ) ,
1 === n . raw . length && e . length > 0
? ( e [ e . length - 1 ] . raw += "\n" )
: e . push ( n ) ;
else if ( ( n = this . tokenizer . code ( t ) ) )
( t = t . substring ( n . raw . length ) ) ,
( i = e [ e . length - 1 ] ) ,
! i || ( "paragraph" !== i . type && "text" !== i . type )
? e . push ( n )
: ( ( i . raw += "\n" + n . raw ) ,
( i . text += "\n" + n . text ) ,
( this . inlineQueue [ this . inlineQueue . length - 1 ] . src = i . text ) ) ;
else if ( ( n = this . tokenizer . fences ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . heading ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . hr ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . blockquote ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . list ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . html ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . def ( t ) ) )
( t = t . substring ( n . raw . length ) ) ,
( i = e [ e . length - 1 ] ) ,
! i || ( "paragraph" !== i . type && "text" !== i . type )
? this . tokens . links [ n . tag ] ||
( this . tokens . links [ n . tag ] = { href : n . href , title : n . title } )
: ( ( i . raw += "\n" + n . raw ) ,
( i . text += "\n" + n . raw ) ,
( this . inlineQueue [ this . inlineQueue . length - 1 ] . src = i . text ) ) ;
else if ( ( n = this . tokenizer . table ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . lheading ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else {
if (
( ( s = t ) ,
this . options . extensions && this . options . extensions . startBlock )
) {
let e = 1 / 0 ;
const n = t . slice ( 1 ) ;
let i ;
this . options . extensions . startBlock . forEach ( function ( t ) {
( i = t . call ( { lexer : this } , n ) ) ,
"number" == typeof i && i >= 0 && ( e = Math . min ( e , i ) ) ;
} ) ,
e < 1 / 0 && e >= 0 && ( s = t . substring ( 0 , e + 1 ) ) ;
}
if ( this . state . top && ( n = this . tokenizer . paragraph ( s ) ) )
( i = e [ e . length - 1 ] ) ,
r && "paragraph" === i . type
? ( ( i . raw += "\n" + n . raw ) ,
( i . text += "\n" + n . text ) ,
this . inlineQueue . pop ( ) ,
( this . inlineQueue [ this . inlineQueue . length - 1 ] . src = i . text ) )
: e . push ( n ) ,
( r = s . length !== t . length ) ,
( t = t . substring ( n . raw . length ) ) ;
else if ( ( n = this . tokenizer . text ( t ) ) )
( t = t . substring ( n . raw . length ) ) ,
( i = e [ e . length - 1 ] ) ,
i && "text" === i . type
? ( ( i . raw += "\n" + n . raw ) ,
( i . text += "\n" + n . text ) ,
this . inlineQueue . pop ( ) ,
( this . inlineQueue [ this . inlineQueue . length - 1 ] . src = i . text ) )
: e . push ( n ) ;
else if ( t ) {
const e = "Infinite loop on byte: " + t . charCodeAt ( 0 ) ;
if ( this . options . silent ) {
console . error ( e ) ;
break ;
}
throw new Error ( e ) ;
}
}
return ( this . state . top = ! 0 ) , e ;
}
inline ( t , e ) {
this . inlineQueue . push ( { src : t , tokens : e } ) ;
}
inlineTokens ( t , e = [ ] ) {
let n ,
i ,
s ,
r ,
a ,
l ,
o = t ;
if ( this . tokens . links ) {
const t = Object . keys ( this . tokens . links ) ;
if ( t . length > 0 )
for (
;
null != ( r = this . tokenizer . rules . inline . reflinkSearch . exec ( o ) ) ;
)
t . includes ( r [ 0 ] . slice ( r [ 0 ] . lastIndexOf ( "[" ) + 1 , - 1 ) ) &&
( o =
o . slice ( 0 , r . index ) +
"[" +
_ ( "a" , r [ 0 ] . length - 2 ) +
"]" +
o . slice ( this . tokenizer . rules . inline . reflinkSearch . lastIndex ) ) ;
}
for ( ; null != ( r = this . tokenizer . rules . inline . blockSkip . exec ( o ) ) ; )
o =
o . slice ( 0 , r . index ) +
"[" +
_ ( "a" , r [ 0 ] . length - 2 ) +
"]" +
o . slice ( this . tokenizer . rules . inline . blockSkip . lastIndex ) ;
for ( ; null != ( r = this . tokenizer . rules . inline . escapedEmSt . exec ( o ) ) ; )
o =
o . slice ( 0 , r . index ) +
"++" +
o . slice ( this . tokenizer . rules . inline . escapedEmSt . lastIndex ) ;
for ( ; t ; )
if (
( a || ( l = "" ) ,
( a = ! 1 ) ,
! (
this . options . extensions &&
this . options . extensions . inline &&
this . options . extensions . inline . some (
( i ) =>
! ! ( n = i . call ( { lexer : this } , t , e ) ) &&
( ( t = t . substring ( n . raw . length ) ) , e . push ( n ) , ! 0 )
)
) )
)
if ( ( n = this . tokenizer . escape ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . tag ( t ) ) )
( t = t . substring ( n . raw . length ) ) ,
( i = e [ e . length - 1 ] ) ,
i && "text" === n . type && "text" === i . type
? ( ( i . raw += n . raw ) , ( i . text += n . text ) )
: e . push ( n ) ;
else if ( ( n = this . tokenizer . link ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . reflink ( t , this . tokens . links ) ) )
( t = t . substring ( n . raw . length ) ) ,
( i = e [ e . length - 1 ] ) ,
i && "text" === n . type && "text" === i . type
? ( ( i . raw += n . raw ) , ( i . text += n . text ) )
: e . push ( n ) ;
else if ( ( n = this . tokenizer . emStrong ( t , o , l ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . codespan ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . br ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . del ( t ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( ( n = this . tokenizer . autolink ( t , R ) ) )
( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
else if ( this . state . inLink || ! ( n = this . tokenizer . url ( t , R ) ) ) {
if (
( ( s = t ) ,
this . options . extensions && this . options . extensions . startInline )
) {
let e = 1 / 0 ;
const n = t . slice ( 1 ) ;
let i ;
this . options . extensions . startInline . forEach ( function ( t ) {
( i = t . call ( { lexer : this } , n ) ) ,
"number" == typeof i && i >= 0 && ( e = Math . min ( e , i ) ) ;
} ) ,
e < 1 / 0 && e >= 0 && ( s = t . substring ( 0 , e + 1 ) ) ;
}
if ( ( n = this . tokenizer . inlineText ( s , L ) ) )
( t = t . substring ( n . raw . length ) ) ,
"_" !== n . raw . slice ( - 1 ) && ( l = n . raw . slice ( - 1 ) ) ,
( a = ! 0 ) ,
( i = e [ e . length - 1 ] ) ,
i && "text" === i . type
? ( ( i . raw += n . raw ) , ( i . text += n . text ) )
: e . push ( n ) ;
else if ( t ) {
const e = "Infinite loop on byte: " + t . charCodeAt ( 0 ) ;
if ( this . options . silent ) {
console . error ( e ) ;
break ;
}
throw new Error ( e ) ;
}
} else ( t = t . substring ( n . raw . length ) ) , e . push ( n ) ;
return e ;
}
}
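// C: the HTML Renderer (exposed as P.Renderer); one method per token type, each returning an HTML string.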
class C {
constructor ( t ) {
this . options = t || e ;
}
code ( t , e , n ) {
const i = ( e || "" ) . match ( /\S*/ ) [ 0 ] ;
if ( this . options . highlight ) {
const e = this . options . highlight ( t , i ) ;
null != e && e !== t && ( ( n = ! 0 ) , ( t = e ) ) ;
}
return (
( t = t . replace ( /\n$/ , "" ) + "\n" ) ,
i
? '<pre><code class="' +
this . options . langPrefix +
o ( i , ! 0 ) +
'">' +
( n ? t : o ( t , ! 0 ) ) +
"</code></pre>\n"
: "<pre><code>" + ( n ? t : o ( t , ! 0 ) ) + "</code></pre>\n"
) ;
}
blockquote ( t ) {
return "<blockquote>\n" + t + "</blockquote>\n" ;
}
html ( t ) {
return t ;
}
heading ( t , e , n , i ) {
return this . options . headerIds
? "<h" +
e +
' id="' +
this . options . headerPrefix +
i . slug ( n ) +
'">' +
t +
"</h" +
e +
">\n"
: "<h" + e + ">" + t + "</h" + e + ">\n" ;
}
hr ( ) {
return this . options . xhtml ? "<hr/>\n" : "<hr>\n" ;
}
list ( t , e , n ) {
const i = e ? "ol" : "ul" ;
return (
"<" +
i +
( e && 1 !== n ? ' start="' + n + '"' : "" ) +
">\n" +
t +
"</" +
i +
">\n"
) ;
}
listitem ( t ) {
return "<li>" + t + "</li>\n" ;
}
checkbox ( t ) {
return (
"<input " +
( t ? 'checked="" ' : "" ) +
'disabled="" type="checkbox"' +
( this . options . xhtml ? " /" : "" ) +
"> "
) ;
}
paragraph ( t ) {
return "<p>" + t + "</p>\n" ;
}
table ( t , e ) {
return (
e && ( e = "<tbody>" + e + "</tbody>" ) ,
"<table>\n<thead>\n" + t + "</thead>\n" + e + "</table>\n"
) ;
}
tablerow ( t ) {
return "<tr>\n" + t + "</tr>\n" ;
}
tablecell ( t , e ) {
const n = e . header ? "th" : "td" ;
return (
( e . align ? "<" + n + ' align="' + e . align + '">' : "<" + n + ">" ) +
t +
"</" +
n +
">\n"
) ;
}
strong ( t ) {
return "<strong>" + t + "</strong>" ;
}
em ( t ) {
return "<em>" + t + "</em>" ;
}
codespan ( t ) {
return "<code>" + t + "</code>" ;
}
br ( ) {
return this . options . xhtml ? "<br/>" : "<br>" ;
}
del ( t ) {
return "<del>" + t + "</del>" ;
}
link ( t , e , n ) {
if ( null === ( t = m ( this . options . sanitize , this . options . baseUrl , t ) ) )
return n ;
let i = '<a href="' + o ( t ) + '"' ;
return e && ( i += ' title="' + e + '"' ) , ( i += ">" + n + "</a>" ) , i ;
}
image ( t , e , n ) {
if ( null === ( t = m ( this . options . sanitize , this . options . baseUrl , t ) ) )
return n ;
let i = '<img src="' + t + '" alt="' + n + '"' ;
return (
e && ( i += ' title="' + e + '"' ) ,
( i += this . options . xhtml ? "/>" : ">" ) ,
i
) ;
}
text ( t ) {
return t ;
}
}
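// M: the TextRenderer (exposed as P.TextRenderer); returns plain text only, used e.g. for heading ids.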
class M {
strong ( t ) {
return t ;
}
em ( t ) {
return t ;
}
codespan ( t ) {
return t ;
}
del ( t ) {
return t ;
}
html ( t ) {
return t ;
}
text ( t ) {
return t ;
}
link ( t , e , n ) {
return "" + n ;
}
image ( t , e , n ) {
return "" + n ;
}
br ( ) {
return "" ;
}
}
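// q: the Slugger (exposed as P.Slugger); turns heading text into unique, URL-safe ids.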
class q {
constructor ( ) {
this . seen = { } ;
}
serialize ( t ) {
return t
. toLowerCase ( )
. trim ( )
. replace ( /<[!\/a-z].*?>/gi , "" )
. replace (
/[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&()*+,./:;<=>?@[\]^`{|}~]/g ,
""
)
. replace ( /\s/g , "-" ) ;
}
getNextSafeSlug ( t , e ) {
let n = t ,
i = 0 ;
if ( this . seen . hasOwnProperty ( n ) ) {
i = this . seen [ t ] ;
do {
i ++ , ( n = t + "-" + i ) ;
} while ( this . seen . hasOwnProperty ( n ) ) ;
}
return e || ( ( this . seen [ t ] = i ) , ( this . seen [ n ] = 0 ) ) , n ;
}
slug ( t , e = { } ) {
const n = this . serialize ( t ) ;
return this . getNextSafeSlug ( n , e . dryrun ) ;
}
}
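// O: the Parser (exposed as P.Parser); walks the token stream and emits HTML through the Renderer.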
class O {
constructor ( t ) {
( this . options = t || e ) ,
( this . options . renderer = this . options . renderer || new C ( ) ) ,
( this . renderer = this . options . renderer ) ,
( this . renderer . options = this . options ) ,
( this . textRenderer = new M ( ) ) ,
( this . slugger = new q ( ) ) ;
}
static parse ( t , e ) {
return new O ( e ) . parse ( t ) ;
}
static parseInline ( t , e ) {
return new O ( e ) . parseInline ( t ) ;
}
parse ( t , e = ! 0 ) {
let n ,
i ,
s ,
r ,
a ,
l ,
o ,
c ,
u ,
d ,
h ,
g ,
m ,
f ,
k ,
w ,
x ,
b ,
y ,
v = "" ;
const S = t . length ;
for ( n = 0 ; n < S ; n ++ )
if (
( ( d = t [ n ] ) ,
this . options . extensions &&
this . options . extensions . renderers &&
this . options . extensions . renderers [ d . type ] &&
( ( y = this . options . extensions . renderers [ d . type ] . call (
{ parser : this } ,
d
) ) ,
! 1 !== y ||
! [
"space" ,
"hr" ,
"heading" ,
"code" ,
"table" ,
"blockquote" ,
"list" ,
"html" ,
"paragraph" ,
"text" ,
] . includes ( d . type ) ) )
)
v += y || "" ;
else
switch ( d . type ) {
case "space" :
continue ;
case "hr" :
v += this . renderer . hr ( ) ;
continue ;
case "heading" :
v += this . renderer . heading (
this . parseInline ( d . tokens ) ,
d . depth ,
p ( this . parseInline ( d . tokens , this . textRenderer ) ) ,
this . slugger
) ;
continue ;
case "code" :
v += this . renderer . code ( d . text , d . lang , d . escaped ) ;
continue ;
case "table" :
for ( c = "" , o = "" , r = d . header . length , i = 0 ; i < r ; i ++ )
o += this . renderer . tablecell (
this . parseInline ( d . header [ i ] . tokens ) ,
{ header : ! 0 , align : d . align [ i ] }
) ;
for (
c += this . renderer . tablerow ( o ) , u = "" , r = d . rows . length , i = 0 ;
i < r ;
i ++
) {
for ( l = d . rows [ i ] , o = "" , a = l . length , s = 0 ; s < a ; s ++ )
o += this . renderer . tablecell ( this . parseInline ( l [ s ] . tokens ) , {
header : ! 1 ,
align : d . align [ s ] ,
} ) ;
u += this . renderer . tablerow ( o ) ;
}
v += this . renderer . table ( c , u ) ;
continue ;
case "blockquote" :
( u = this . parse ( d . tokens ) ) , ( v += this . renderer . blockquote ( u ) ) ;
continue ;
case "list" :
for (
h = d . ordered ,
g = d . start ,
m = d . loose ,
r = d . items . length ,
u = "" ,
i = 0 ;
i < r ;
i ++
)
( k = d . items [ i ] ) ,
( w = k . checked ) ,
( x = k . task ) ,
( f = "" ) ,
k . task &&
( ( b = this . renderer . checkbox ( w ) ) ,
m
? k . tokens . length > 0 && "paragraph" === k . tokens [ 0 ] . type
? ( ( k . tokens [ 0 ] . text = b + " " + k . tokens [ 0 ] . text ) ,
k . tokens [ 0 ] . tokens &&
k . tokens [ 0 ] . tokens . length > 0 &&
"text" === k . tokens [ 0 ] . tokens [ 0 ] . type &&
( k . tokens [ 0 ] . tokens [ 0 ] . text =
b + " " + k . tokens [ 0 ] . tokens [ 0 ] . text ) )
: k . tokens . unshift ( { type : "text" , text : b } )
: ( f += b ) ) ,
( f += this . parse ( k . tokens , m ) ) ,
( u += this . renderer . listitem ( f , x , w ) ) ;
v += this . renderer . list ( u , h , g ) ;
continue ;
case "html" :
v += this . renderer . html ( d . text ) ;
continue ;
case "paragraph" :
v += this . renderer . paragraph ( this . parseInline ( d . tokens ) ) ;
continue ;
case "text" :
for (
u = d . tokens ? this . parseInline ( d . tokens ) : d . text ;
n + 1 < S && "text" === t [ n + 1 ] . type ;
)
( d = t [ ++ n ] ) ,
( u += "\n" + ( d . tokens ? this . parseInline ( d . tokens ) : d . text ) ) ;
v += e ? this . renderer . paragraph ( u ) : u ;
continue ;
default : {
const t = 'Token with "' + d . type + '" type was not found.' ;
if ( this . options . silent ) return void console . error ( t ) ;
throw new Error ( t ) ;
}
}
return v ;
}
parseInline ( t , e ) {
e = e || this . renderer ;
let n ,
i ,
s ,
r = "" ;
const a = t . length ;
for ( n = 0 ; n < a ; n ++ )
if (
( ( i = t [ n ] ) ,
this . options . extensions &&
this . options . extensions . renderers &&
this . options . extensions . renderers [ i . type ] &&
( ( s = this . options . extensions . renderers [ i . type ] . call (
{ parser : this } ,
i
) ) ,
! 1 !== s ||
! [
"escape" ,
"html" ,
"link" ,
"image" ,
"strong" ,
"em" ,
"codespan" ,
"br" ,
"del" ,
"text" ,
] . includes ( i . type ) ) )
)
r += s || "" ;
else
switch ( i . type ) {
case "escape" :
case "text" :
r += e . text ( i . text ) ;
break ;
case "html" :
r += e . html ( i . text ) ;
break ;
case "link" :
r += e . link ( i . href , i . title , this . parseInline ( i . tokens , e ) ) ;
break ;
case "image" :
r += e . image ( i . href , i . title , i . text ) ;
break ;
case "strong" :
r += e . strong ( this . parseInline ( i . tokens , e ) ) ;
break ;
case "em" :
r += e . em ( this . parseInline ( i . tokens , e ) ) ;
break ;
case "codespan" :
r += e . codespan ( i . text ) ;
break ;
case "br" :
r += e . br ( ) ;
break ;
case "del" :
r += e . del ( this . parseInline ( i . tokens , e ) ) ;
break ;
default : {
const t = 'Token with "' + i . type + '" type was not found.' ;
if ( this . options . silent ) return void console . error ( t ) ;
throw new Error ( t ) ;
}
}
return r ;
}
}
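// P: the marked() entry point. Validates input, lexes, optionally walks tokens, and parses to HTML; when a callback is passed it first runs the configured highlight function asynchronously over code tokens.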
function P ( t , e , n ) {
if ( null == t )
throw new Error ( "marked(): input parameter is undefined or null" ) ;
if ( "string" != typeof t )
throw new Error (
"marked(): input parameter is of type " +
Object . prototype . toString . call ( t ) +
", string expected"
) ;
if (
( "function" == typeof e && ( ( n = e ) , ( e = null ) ) ,
T ( ( e = y ( { } , P . defaults , e || { } ) ) ) ,
n )
) {
const i = e . highlight ;
let s ;
try {
s = I . lex ( t , e ) ;
} catch ( t ) {
return n ( t ) ;
}
const r = function ( t ) {
let r ;
if ( ! t )
try {
e . walkTokens && P . walkTokens ( s , e . walkTokens ) , ( r = O . parse ( s , e ) ) ;
} catch ( e ) {
t = e ;
}
return ( e . highlight = i ) , t ? n ( t ) : n ( null , r ) ;
} ;
if ( ! i || i . length < 3 ) return r ( ) ;
if ( ( delete e . highlight , ! s . length ) ) return r ( ) ;
let a = 0 ;
return (
P . walkTokens ( s , function ( t ) {
"code" === t . type &&
( a ++ ,
setTimeout ( ( ) => {
i ( t . text , t . lang , function ( e , n ) {
if ( e ) return r ( e ) ;
null != n && n !== t . text && ( ( t . text = n ) , ( t . escaped = ! 0 ) ) ,
a -- ,
0 === a && r ( ) ;
} ) ;
} , 0 ) ) ;
} ) ,
void ( 0 === a && r ( ) )
) ;
}
try {
const n = I . lex ( t , e ) ;
return e . walkTokens && P . walkTokens ( n , e . walkTokens ) , O . parse ( n , e ) ;
} catch ( t ) {
if (
( ( t . message +=
"\nPlease report this to https://github.com/markedjs/marked." ) ,
e . silent )
)
return (
"<p>An error occurred:</p><pre>" + o ( t . message + "" , ! 0 ) + "</pre>"
) ;
throw t ;
}
}
( P . options = P . setOptions =
function ( t ) {
var n ;
return y ( P . defaults , t ) , ( n = P . defaults ) , ( e = n ) , P ;
} ) ,
( P . getDefaults = t ) ,
( P . defaults = e ) ,
( P . use = function ( ... t ) {
const e = y ( { } , ... t ) ,
n = P . defaults . extensions || { renderers : { } , childTokens : { } } ;
let i ;
t . forEach ( ( t ) => {
if (
( t . extensions &&
( ( i = ! 0 ) ,
t . extensions . forEach ( ( t ) => {
if ( ! t . name ) throw new Error ( "extension name required" ) ;
if ( t . renderer ) {
const e = n . renderers ? n . renderers [ t . name ] : null ;
n . renderers [ t . name ] = e
? function ( ... n ) {
let i = t . renderer . apply ( this , n ) ;
return ! 1 === i && ( i = e . apply ( this , n ) ) , i ;
}
: t . renderer ;
}
if ( t . tokenizer ) {
if ( ! t . level || ( "block" !== t . level && "inline" !== t . level ) )
throw new Error ( "extension level must be 'block' or 'inline'" ) ;
n [ t . level ]
? n [ t . level ] . unshift ( t . tokenizer )
: ( n [ t . level ] = [ t . tokenizer ] ) ,
t . start &&
( "block" === t . level
? n . startBlock
? n . startBlock . push ( t . start )
: ( n . startBlock = [ t . start ] )
: "inline" === t . level &&
( n . startInline
? n . startInline . push ( t . start )
: ( n . startInline = [ t . start ] ) ) ) ;
}
t . childTokens && ( n . childTokens [ t . name ] = t . childTokens ) ;
} ) ) ,
t . renderer )
) {
const n = P . defaults . renderer || new C ( ) ;
for ( const e in t . renderer ) {
const i = n [ e ] ;
n [ e ] = ( ... s ) => {
let r = t . renderer [ e ] . apply ( n , s ) ;
return ! 1 === r && ( r = i . apply ( n , s ) ) , r ;
} ;
}
e . renderer = n ;
}
if ( t . tokenizer ) {
const n = P . defaults . tokenizer || new A ( ) ;
for ( const e in t . tokenizer ) {
const i = n [ e ] ;
n [ e ] = ( ... s ) => {
let r = t . tokenizer [ e ] . apply ( n , s ) ;
return ! 1 === r && ( r = i . apply ( n , s ) ) , r ;
} ;
}
e . tokenizer = n ;
}
if ( t . walkTokens ) {
const n = P . defaults . walkTokens ;
e . walkTokens = function ( e ) {
t . walkTokens . call ( this , e ) , n && n . call ( this , e ) ;
} ;
}
i && ( e . extensions = n ) , P . setOptions ( e ) ;
} ) ;
} ) ,
( P . walkTokens = function ( t , e ) {
for ( const n of t )
switch ( ( e . call ( P , n ) , n . type ) ) {
case "table" :
for ( const t of n . header ) P . walkTokens ( t . tokens , e ) ;
for ( const t of n . rows ) for ( const n of t ) P . walkTokens ( n . tokens , e ) ;
break ;
case "list" :
P . walkTokens ( n . items , e ) ;
break ;
default :
P . defaults . extensions &&
P . defaults . extensions . childTokens &&
P . defaults . extensions . childTokens [ n . type ]
? P . defaults . extensions . childTokens [ n . type ] . forEach ( function ( t ) {
P . walkTokens ( n [ t ] , e ) ;
} )
: n . tokens && P . walkTokens ( n . tokens , e ) ;
}
} ) ,
( P . parseInline = function ( t , e ) {
if ( null == t )
throw new Error (
"marked.parseInline(): input parameter is undefined or null"
) ;
if ( "string" != typeof t )
throw new Error (
"marked.parseInline(): input parameter is of type " +
Object . prototype . toString . call ( t ) +
", string expected"
) ;
T ( ( e = y ( { } , P . defaults , e || { } ) ) ) ;
try {
const n = I . lexInline ( t , e ) ;
return e . walkTokens && P . walkTokens ( n , e . walkTokens ) , O . parseInline ( n , e ) ;
} catch ( t ) {
if (
( ( t . message +=
"\nPlease report this to https://github.com/markedjs/marked." ) ,
e . silent )
)
return (
"<p>An error occurred:</p><pre>" + o ( t . message + "" , ! 0 ) + "</pre>"
) ;
throw t ;
}
} ) ,
( P . Parser = O ) ,
( P . parser = O . parse ) ,
( P . Renderer = C ) ,
( P . TextRenderer = M ) ,
( P . Lexer = I ) ,
( P . lexer = I . lex ) ,
( P . Tokenizer = A ) ,
( P . Slugger = q ) ,
( P . parse = P ) ;
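// Default export: the reveal.js speaker notes plugin. It opens the speaker view popup, keeps it in sync via postMessage, and registers the "S" key binding.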
export default ( ) => {
let t ,
e ,
n = null ;
function i ( ) {
if ( n && ! n . closed ) n . focus ( ) ;
else {
if (
( ( n = window . open (
"about:blank" ,
"reveal.js - Notes" ,
"width=1100,height=700"
) ) ,
( n . marked = P ) ,
n . document . write (
"\x3c!--\n\tNOTE: You need to build the notes plugin after making changes to this file.\n--\x3e\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" > \ n \ n \ t \ t < title > reveal . js - Speaker View < /title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\tfont-family: Helvetica;\n\t\t\t\tfont-size: 18px;\n\t\t\t}\n\n\t\t\t#current-slide,\n\t\t\t#upcoming-slide,\n\t\t\t#speaker-controls {\n\t\t\t\tpadding: 6px;\n\t\t\t\tbox-sizing: border-box;\n\t\t\t\t-moz-box-sizing: border-box;\n\t\t\t}\n\n\t\t\t#current-slide iframe,\n\t\t\t#upcoming-slide iframe {\n\t\t\t\twidth: 100%;\n\t\t\t\theight: 100%;\n\t\t\t\tborder: 1px solid #ddd;\n\t\t\t}\n\n\t\t\t#current-slide .label,\n\t\t\t#upcoming-slide .label {\n\t\t\t\tposition: absolute;\n\t\t\t\ttop: 10px;\n\t\t\t\tleft: 10px;\n\t\t\t\tz-index: 2;\n\t\t\t}\n\n\t\t\t#connection-status {\n\t\t\t\tposition: absolute;\n\t\t\t\ttop: 0;\n\t\t\t\tleft: 0;\n\t\t\t\twidth: 100%;\n\t\t\t\theight: 100%;\n\t\t\t\tz-index: 20;\n\t\t\t\tpadding: 30% 20% 20% 20%;\n\t\t\t\tfont-size: 18px;\n\t\t\t\tcolor: #222;\n\t\t\t\tbackground: #fff;\n\t\t\t\ttext-align: center;\n\t\t\t\tbox-sizing: border-box;\n\t\t\t\tline-height: 1.4;\n\t\t\t}\n\n\t\t\t.overlay-element {\n\t\t\t\theight: 34px;\n\t\t\t\tline-height: 34px;\n\t\t\t\tpadding: 0 10px;\n\t\t\t\ttext-shadow: none;\n\t\t\t\tbackground: rgba( 220, 220, 220, 0.8 );\n\t\t\t\tcolor: #222;\n\t\t\t\tfont-size: 14px;\n\t\t\t}\n\n\t\t\t.overlay-element.interactive:hover {\n\t\t\t\tbackground: rgba( 220, 220, 220, 1 );\n\t\t\t}\n\n\t\t\t#current-slide {\n\t\t\t\tposition: absolute;\n\t\t\t\twidth: 60%;\n\t\t\t\theight: 100%;\n\t\t\t\ttop: 0;\n\t\t\t\tleft: 0;\n\t\t\t\tpadding-right: 0;\n\t\t\t}\n\n\t\t\t#upcoming-slide {\n\t\t\t\tposition: absolute;\n\t\t\t\twidth: 40%;\n\t\t\t\theight: 40%;\n\t\t\t\tright: 0;\n\t\t\t\ttop: 0;\n\t\t\t}\n\n\t\t\t/ * Speaker controls * / \ n \ t \ t \ t # s p e a k e r - c o n t r o l s { \ n \ t \ t \ t \ t p o s i t i o n : a b s o l u t e ; \ n \ t \ t \ t \ t t o p : 4 0 % ; \ n \ t \ t \ t \ t r i g h t : 0 ; \ n \ t \ t \ t \ t w i d t h : 4 0 % ; \ n \ t \ t \ t \ t h e i g h t : 6 0 % ; \ n \ t \ t \ t \ t o v e r f l o w : a u t o ; \ n \ t \ t \ t \ t f o n t - s i z e : 1 8 p x ; \ n \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . h i d d e n , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - n o t e s . h i d d e n { \ n \ t \ t \ t \ t \ t d i s p l a y : n o n e ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . l a b e l , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - p a c e . l a b e l , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - n o t e s . l a b e l { \ n \ t \ t \ t \ t \ t t e x t - t r a n s f o r m : u p p e r c a s e ; \ n \ t \ t \ t \ t \ t f o n t - w e i g h t : n o r m a l ; \ n \ t \ t \ t \ t \ t f o n t - s i z e : 0 . 6 6 e m ; \ n \ t \ t \ t \ t \ t c o l o r : # 6 6 6 ; \ n \ t \ t \ t \ t \ t m a r g i n : 0 ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e , . s p e a k e r - c o n t r o l s - p a c e { \ n \ t \ t \ t \ t \ t b o r d e r - b o t t o m : 1 p x s o l i d r g b a ( 2 0 0 , 2 0 0 , 2 0 0 , 0 . 5 ) ; \ n \ t \ t \ t \ t \ t m a r g i n - b o t t o m : 1 0 p x ; \ n \ t \ t \ t \ t \ t p a d d i n g : 1 0 p x 1 6 p x ; \ n \ t \ t \ t \ t \ t p a d d i n g - b o t t o m : 2 0 p x ; \ n \ t \ t \ t \ t \ t c u r s o r : p o i n t e r ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . 
s p e a k e r - c o n t r o l s - t i m e . r e s e t - b u t t o n { \ n \ t \ t \ t \ t \ t o p a c i t y : 0 ; \ n \ t \ t \ t \ t \ t f l o a t : r i g h t ; \ n \ t \ t \ t \ t \ t c o l o r : # 6 6 6 ; \ n \ t \ t \ t \ t \ t t e x t - d e c o r a t i o n : n o n e ; \ n \ t \ t \ t \ t } \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e : h o v e r . r e s e t - b u t t o n { \ n \ t \ t \ t \ t \ t o p a c i t y : 1 ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . t i m e r , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . c l o c k { \ n \ t \ t \ t \ t \ t w i d t h : 5 0 % ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . t i m e r , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . c l o c k , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g . h o u r s - v a l u e , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g . m i n u t e s - v a l u e , \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g . s e c o n d s - v a l u e { \ n \ t \ t \ t \ t \ t f o n t - s i z e : 1 . 9 e m ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . t i m e r { \ n \ t \ t \ t \ t \ t f l o a t : l e f t ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . c l o c k { \ n \ t \ t \ t \ t \ t f l o a t : r i g h t ; \ n \ t \ t \ t \ t \ t t e x t - a l i g n : r i g h t ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e s p a n . m u t e { \ n \ t \ t \ t \ t \ t o p a c i t y : 0 . 3 ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g - t i t l e { \ n \ t \ t \ t \ t \ t m a r g i n - t o p : 5 p x ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g . a h e a d { \ n \ t \ t \ t \ t \ t c o l o r : b l u e ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g . o n - t r a c k { \ n \ t \ t \ t \ t \ t c o l o r : g r e e n ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - t i m e . p a c i n g . b e h i n d { \ n \ t \ t \ t \ t \ t c o l o r : r e d ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r - c o n t r o l s - n o t e s { \ n \ t \ t \ t \ t \ t p a d d i n g : 1 0 p x 1 6 p x ; \ n \ t \ t \ t \ t } \ n \ n \ t \ t \ t \ t . s p e a k e r -
) ,
! n )
)
return void alert (
"Speaker view popup failed to open. Please make sure popups are allowed and reopen the speaker view."
) ;
! ( function ( ) {
const i = e . getConfig ( ) . url ,
s =
"string" == typeof i
? i
: window . location . protocol +
"//" +
window . location . host +
window . location . pathname +
window . location . search ;
( t = setInterval ( function ( ) {
n . postMessage (
JSON . stringify ( {
namespace : "reveal-notes" ,
type : "connect" ,
state : e . getState ( ) ,
url : s ,
} ) ,
"*"
) ;
} , 500 ) ) ,
window . addEventListener ( "message" , r ) ;
} ) ( ) ;
}
}
function s ( t ) {
let i = e . getCurrentSlide ( ) ,
s = i . querySelectorAll ( "aside.notes" ) ,
r = i . querySelector ( ".current-fragment" ) ,
a = {
namespace : "reveal-notes" ,
type : "state" ,
notes : "" ,
markdown : ! 1 ,
whitespace : "normal" ,
state : e . getState ( ) ,
} ;
if (
( i . hasAttribute ( "data-notes" ) &&
( ( a . notes = i . getAttribute ( "data-notes" ) ) , ( a . whitespace = "pre-wrap" ) ) ,
r )
) {
let t = r . querySelector ( "aside.notes" ) ;
t
? ( ( a . notes = t . innerHTML ) ,
( a . markdown = "string" == typeof t . getAttribute ( "data-markdown" ) ) ,
( s = null ) )
: r . hasAttribute ( "data-notes" ) &&
( ( a . notes = r . getAttribute ( "data-notes" ) ) ,
( a . whitespace = "pre-wrap" ) ,
( s = null ) ) ;
}
s &&
( ( a . notes = Array . from ( s )
. map ( ( t ) => t . innerHTML )
. join ( "\n" ) ) ,
( a . markdown =
s [ 0 ] && "string" == typeof s [ 0 ] . getAttribute ( "data-markdown" ) ) ) ,
n . postMessage ( JSON . stringify ( a ) , "*" ) ;
}
function r ( i ) {
if (
( function ( t ) {
try {
return window . location . origin === t . source . location . origin ;
} catch ( t ) {
return ! 1 ;
}
} ) ( i )
) {
let s = JSON . parse ( i . data ) ;
s && "reveal-notes" === s . namespace && "connected" === s . type
? ( clearInterval ( t ) , a ( ) )
: s &&
"reveal-notes" === s . namespace &&
"call" === s . type &&
( function ( t , i , s ) {
let r = e [ t ] . apply ( e , i ) ;
n . postMessage (
JSON . stringify ( {
namespace : "reveal-notes" ,
type : "return" ,
result : r ,
callId : s ,
} ) ,
"*"
) ;
} ) ( s . methodName , s . arguments , s . callId ) ;
}
}
function a ( ) {
e . on ( "slidechanged" , s ) ,
e . on ( "fragmentshown" , s ) ,
e . on ( "fragmenthidden" , s ) ,
e . on ( "overviewhidden" , s ) ,
e . on ( "overviewshown" , s ) ,
e . on ( "paused" , s ) ,
e . on ( "resumed" , s ) ,
s ( ) ;
}
return {
id : "notes" ,
init : function ( t ) {
( e = t ) ,
/receiver/i . test ( window . location . search ) ||
( null !== window . location . search . match ( /(\?|\&)notes/gi )
? i ( )
: window . addEventListener ( "message" , ( t ) => {
if ( ! n && "string" == typeof t . data ) {
let i ;
try {
i = JSON . parse ( t . data ) ;
} catch ( t ) { }
i &&
"reveal-notes" === i . namespace &&
"heartbeat" === i . type &&
( ( e = t . source ) ,
n && ! n . closed
? n . focus ( )
: ( ( n = e ) , window . addEventListener ( "message" , r ) , a ( ) ) ) ;
}
var e ;
} ) ,
e . addKeyBinding (
{ keyCode : 83 , key : "S" , description : "Speaker notes view" } ,
function ( ) {
i ( ) ;
}
) ) ;
} ,
open : i ,
} ;
} ;