// weave.rxsl
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
//
// Weave Transform
//
// Author:
// Name : Hugh Field-Richards
// Email : hsfr@hsfr.org.uk
//
// Copyright 2025 Hugh Field-Richards.
//
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
//
// This is the main weave generator for AntWeave. It is written
// in Rexsel, a simplified and compact version of the XSLT language
// that is written in XML. The Rexsel code is translated at build time
// from within an Ant script.
//
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
//
// Date Who Changes
// ==========================================================================
// 14th July 2015 HSFR Created
// 10th February 2016 HSFR Major fixes to indexing and normalising variables
// 29th December 2024 HSFR Translated to Rexsel
// 7th September 2025 HSFR Updated variable and proc names
// 7th October 2025 HSFR Added AMPERSAND, BACKSPACE etc constants
//
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*- Copyright -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
//
// All copyright, database rights and other intellectual property in this
// software is the property of Hugh Field-Richards. The software is being
// released free of charge for use in accordance with the terms of open
// source initiative GNU General Public Licence.
//
//-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* Disclaimer -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
//
// The author and copyright holder does not verify or warrant the accuracy,
// completeness or suitability of this software for any particular purpose.
// Any use of this software whatsoever is entirely at the user's own risk
// and the author and copyright holder accepts no liability in relation thereto.
//
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
stylesheet {
    version "1.0"
    // All AntWeave input elements live in this namespace and are
    // referenced below with the "aw" prefix.
    xmlns "aw" "http://www.hsfr.org.uk/Schema/AntWeave"
    // Plain UTF-8 text output: the woven result is LaTeX/noweb source,
    // so no XML declaration and no added indentation are wanted.
    output {
        method text
        version "1.0"
        encoding "UTF-8"
        omit-xml-declaration yes
        standalone yes
        indent no
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* PARAMETERS -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Name of the AntWeave source file, echoed into \nwfilename{...}.
    parameter antweaveFileName "''"
    // Comment delimiters for the woven output ('%' suits TeX).
    parameter startCommentString "'%'"
    parameter endCommentString "''"
    // 'yes' = noweb "delay" mode: the first documentation chunk supplies
    // the LaTeX preamble, so no header/footer is generated here.
    parameter delay "'no'"
    // 'yes' = generate chunk and identifier cross-reference markup.
    parameter index "'yes'"
    // Class argument for \documentclass, braces included.
    parameter documentClass "'{article}'"
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*- CONSTANTS -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Single characters used when assembling TeX text with concat().
    // NEW_LINE deliberately embeds a literal newline in its text value.
    constant NEW_LINE { text "
" }
    constant NON_BREAKING_SPACE { text "~" }
    constant SPACE { text " " }
    constant AMPERSAND { text "&" }
    constant BACKSLASH { text "\" }
    constant DOLLAR { text "$" }
    constant HASH { text "#" }
    constant PERCENT { text "%" }
    constant UNDERSCORE { text "_" }
    // UTF-8 en and em dashes, rewritten to TeX -- and --- in documentation.
    constant NDASH { text "–" }
    constant MDASH { text "—" }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* TEMPLATES -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Root template and overall driver.  Normal mode emits a complete
    // LaTeX document; in delay mode the first documentation chunk is
    // assumed to contain the preamble, so it is emitted verbatim first
    // and chunk processing starts at the second chunk.
    match using "/" {
        choose {
            when "$delay = 'no'" {
                call outputNowebLatexHeader
                call processChunks {
                    with startChunk "'1'"
                }
                call outputLatexFooter
            }
            otherwise {
                // Delay mode: the document supplies its own preamble.
                call outputFirstDocumentationChunk
                call outputAntweaveFileName
                call processChunks {
                    with startChunk "'2'"
                }
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit the standard LaTeX/noweb preamble.  Only the \input line is
    // followed by an explicit $NEW_LINE: \input takes an unbraced file
    // name terminated by whitespace, so the break is mandatory there.
    // NOTE(review): the remaining lines rely on the translator/output
    // layer for line breaks (the run-together commands are still valid
    // LaTeX) - confirm against the generated XSLT.
    proc outputNowebLatexHeader {
        value "concat( $BACKSLASH, 'documentclass', $documentClass )"
        value "concat( $BACKSLASH, 'usepackage{noweb}' )"
        value "concat( $BACKSLASH, 'pagestyle{noweb}' )"
        value "concat( $BACKSLASH, 'noweboptions{}' )"
        value "concat( $BACKSLASH, 'input userMacros', $NEW_LINE )"
        value "concat( $BACKSLASH, 'begin{document}' )"
        call outputAntweaveFileName
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Close the LaTeX document opened by outputNowebLatexHeader.
    proc outputLatexFooter {
        value "concat( $BACKSLASH, 'end{document}', $NEW_LINE )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit \nwfilename{<source file>} so noweb can label sub-pages.
    proc outputAntweaveFileName {
        value "concat( $BACKSLASH, 'nwfilename{', $antweaveFileName, '}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Delay mode only: copy the text lines of the first chunk (which
    // must be a documentation chunk) straight to the output, stripping
    // the CDATA wrapper placed around them.
    // NOTE(review): the markers tested are '<!CDATA' and a trailing '>',
    // not the XML forms '<![CDATA[' / ']]>' - confirm they match what
    // the AntWeave front end actually emits.
    proc outputFirstDocumentationChunk {
        choose {
            when "//aw:chunk[position() = '1' and @type = 'documentation']" {
                foreach "//aw:chunk[@type = 'documentation'][position() = '1']/aw:text" {
                    // Line with any leading '<!CDATA' marker removed.
                    variable removeStart {
                        choose {
                            when "starts-with(., '<!CDATA')" {
                                value "substring-after(., '<!CDATA')"
                            }
                            otherwise {
                                value "."
                            }
                        }
                    }
                    // Line with a trailing '>' removed - but only when
                    // the '>' is the final character of the line.
                    variable removeEnd {
                        choose {
                            when "contains($removeStart, '>') and not(substring-after($removeStart, '>'))" {
                                value "substring-before($removeStart, '>')"
                            }
                            otherwise {
                                value "$removeStart"
                            }
                        }
                    }
                    value "concat($removeEnd, $NEW_LINE)"
                }
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Weave every chunk from position $startChunk onwards.  When the
    // final chunk is documentation and indexing is on, the closing
    // indices are emitted just before that chunk's own text.
    proc processChunks {
        // 1-based position of the first chunk to process ('1' normally,
        // '2' in delay mode where the first chunk was emitted already).
        parameter startChunk
        foreach "//aw:chunk[position() >= $startChunk]" {
            choose {
                when "@type = 'documentation'" {
                    if "position() = last() and $index = 'yes'" {
                        call outputFinalIndex
                    }
                    call beginDocumentationSection
                    call nwdocspar
                    call outputDocumentationLines
                    call endDocumentationSection
                }
                when "@type = 'code'" {
                    call beginCodeSection
                    // Margin sub-page tags only apply when indexing.
                    if "$index = 'yes'" {
                        call sublabel
                        call nwmargintag
                    }
                    // Definition line, then the deferred markup noweb
                    // expects between start/end def-line markers.
                    call moddef
                    value "concat( $BACKSLASH, 'nwstartdeflinemarkup' )"
                    call outputUsesOnDefLine
                    call outputPrevNextDefs
                    value "concat( $BACKSLASH, 'nwenddeflinemarkup' )"
                    // The chunk body itself, token by token.
                    apply-templates scope "outputCode"
                    call outputDefinitionsUsedIn
                    call endCodeSection
                }
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // \nwbegindocs{n}: n is the chunk's zero-based position within the
    // node list selected by the caller's foreach.
    proc beginDocumentationSection {
        value "concat( $BACKSLASH, 'nwbegindocs{', position() - 1, '}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Close a documentation chunk with \nwenddocs{}.
    proc endDocumentationSection {
        value "concat( $BACKSLASH, 'nwenddocs{', '}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // \nwbegincode{n}: n is the chunk's zero-based position within the
    // node list selected by the caller's foreach.
    proc beginCodeSection {
        value "concat( $BACKSLASH, 'nwbegincode{', position() - 1, '}')"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Close a code chunk with \nwendcode{}.
    proc endCodeSection {
        value "concat( $BACKSLASH, 'nwendcode{}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // After a code chunk's body, emit all of its cross-reference
    // trailers in the order noweb expects.
    // NOTE(review): the codeHash parameter is never referenced and no
    // caller supplies it - candidate for removal.
    proc outputDefinitionsUsedIn {
        parameter codeHash
        call outputDefinesForIndex
        call outputAlsoDefinedIn
        call outputUsedIn
        call outputDefines
        call outputVariablesUsed
        call outputIndexUsed
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // For each identifier the current chunk defines, emit
    // \nwindexdefn{\nwixident{<tex form>}}{<index form>}{<chunk hash>},
    // and a single \eatline afterwards when anything was produced.
    proc outputDefinesForIndex {
        // Hash of the code chunk currently in context.
        variable currentChunkHash "@hash"
        foreach "//aw:definitions[@chunk = $currentChunkHash]/aw:define" {
            // Identifier escaped for TeX display.
            variable normalisedVar1 {
                call normalisedTeXInDocumentation {
                    with txt "@var"
                }
            }
            // Identifier normalised for use as an index key.
            variable normalisedVar2 {
                call normalisedVarInIndex {
                    with txt "@var"
                }
            }
            variable variableDef {
                value "concat( $BACKSLASH, 'nwixident{', $normalisedVar1, '}' )"
            }
            variable variablePair {
                value "concat( '{', $normalisedVar2, '}{', $currentChunkHash, '}' )"
            }
            value "concat( $BACKSLASH, 'nwindexdefn{', $variableDef, '}', $variablePair )"
        }
        // \eatline swallows the line break only if definitions exist.
        if "//aw:definitions[@chunk = $currentChunkHash]/aw:define" {
            value "concat( $BACKSLASH, 'eatline', $NEW_LINE )"
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Build the \nwidentdefs{...} line listing every identifier this
    // chunk defines as \\{{\nwixident{<tex form>}}{<index form>}};
    // nothing is emitted when the chunk defines no identifiers.
    proc outputDefines {
        variable currentChunkHash "@hash"
        // Accumulate the formatted pairs for all defines of this chunk.
        variable defList {
            foreach "//aw:definitions[@chunk = $currentChunkHash]/aw:define" {
                // Identifier escaped for TeX display.
                variable normalisedVar1 {
                    call normalisedTeXInDocumentation {
                        with txt "@var"
                    }
                }
                // Identifier normalised for use as an index key.
                variable normalisedVar2 {
                    call normalisedVarInIndex {
                        with txt "@var"
                    }
                }
                value "concat( $BACKSLASH, $BACKSLASH, '{{', $BACKSLASH, 'nwixident{', $normalisedVar1, '}}{', $normalisedVar2, '}}' )"
            }
        }
        if "string-length($defList) > 0" {
            value "concat( $BACKSLASH, 'nwidentdefs{', $defList, '}' )"
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Chunk hashes have the form <id>-<index>.  When other code chunks
    // share this chunk's <id> (i.e. the chunk is defined in several
    // parts), emit \nwalsodefined{...} listing the continuation hashes.
    // NOTE(review): the filter only excludes index 1, so a continuation
    // chunk includes its own hash in the list - confirm intended.
    proc outputAlsoDefinedIn {
        variable currentChunkHash "@hash"
        // The <id> part of this chunk's hash (text before the last '-').
        variable currentChunkId {
            call substringBeforeLast {
                with delimiter "'-'"
                with string "$currentChunkHash"
            }
        }
        // \\{<hash>} entries for every same-named part except the first.
        variable alsoList {
            foreach "//aw:chunk[@type = 'code']" {
                variable chunkId {
                    call substringBeforeLast {
                        with delimiter "'-'"
                        with string "@hash"
                    }
                }
                // Numeric suffix after the last '-'.
                variable chunkIndex {
                    call substringAfterLast {
                        with delimiter "'-'"
                        with string "@hash"
                    }
                }
                if "($chunkId = $currentChunkId) and $chunkIndex != 1" {
                    value "concat( $BACKSLASH, $BACKSLASH, '{', @hash, '}')"
                }
            }
        }
        if "string-length($alsoList) > 0" {
            value "concat( $BACKSLASH, 'nwalsodefined{', $alsoList, '}')"
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
proc outputIndexUsed {
variable currentHash "@hash"
variable usedList {
foreach "//aw:chunk[@hash = $currentHash]/aw:code/aw:variable" {
value "concat(., ' ')"
}
}
variable normalisedUsedList {
call removeDuplicates {
with delimiter "' '"
with newstring "''"
with string "$usedList"
}
}
variable taggedUsedList {
call build-indexed-variables {
with newstring "''"
with delimiter "' '"
with string "$normalisedUsedList"
with currentHash "$currentHash"
}
}
value "$taggedUsedList"
}
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Recursively walk a delimiter-separated identifier list and append
    // a \nwindexuse{\nwixident{<tex>}}{<index>}{<hash>} entry for each
    // identifier NOT defined by the chunk $currentHash.  Identifiers
    // defined locally are skipped.  Terminates because remainderLine is
    // empty once the last (trailing) delimiter has been consumed.
    proc build-indexed-variables {
        // Remaining unprocessed part of the list.
        parameter string
        // Accumulated output so far (tail-recursive accumulator).
        parameter newstring
        // Token separator (a single space in practice).
        parameter delimiter
        // Hash of the chunk whose uses are being indexed.
        parameter currentHash
        choose {
            when "$string = ''" {
                value "$newstring"
            }
            otherwise {
                // First token of the remaining list.
                variable nextToken {
                    choose {
                        when "contains( $string, $delimiter )" {
                            value "substring-before( $string, $delimiter )"
                        }
                        otherwise {
                            value "$string"
                        }
                    }
                }
                // Token escaped for TeX display.
                variable normalisedVar1 {
                    call normalisedTeXInDocumentation {
                        with txt "$nextToken"
                    }
                }
                // Token normalised for use as an index key.
                variable normalisedVar2 {
                    call normalisedVarInIndex {
                        with txt "$nextToken"
                    }
                }
                variable remainderLine "substring-after( $string, $delimiter )"
                choose {
                    when "//aw:definitions[@chunk = $currentHash]//aw:define[@var = $nextToken]" {
                        // Defined in this chunk: not a "use", skip it.
                        call build-indexed-variables {
                            with string "$remainderLine"
                            with newstring "$newstring"
                            with delimiter "$delimiter"
                            with currentHash "$currentHash"
                        }
                    }
                    otherwise {
                        // Append this token's entry to the accumulator.
                        variable temp {
                            choose {
                                when "$newstring = ''" {
                                    value "concat( $BACKSLASH, 'nwindexuse{', $BACKSLASH, 'nwixident{', $normalisedVar1, '}}{', $normalisedVar2, '}{', $currentHash, '}' )"
                                }
                                otherwise {
                                    value "concat( $newstring, $BACKSLASH, 'nwindexuse{', $BACKSLASH, 'nwixident{', $normalisedVar1, '}}{', $normalisedVar2, '}{', $currentHash, '}' )"
                                }
                            }
                        }
                        call build-indexed-variables {
                            with string "$remainderLine"
                            with newstring "$temp"
                            with delimiter "$delimiter"
                            with currentHash "$currentHash"
                        }
                    }
                }
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit either \nwnotused{<name>} (the chunk is a root, included
    // nowhere) or \nwused{...} listing the hashes of every chunk that
    // includes it, resolved from usedIn/@ref via the chunk @id.
    proc outputUsedIn {
        variable currentChunkHash "@hash"
        choose {
            when "not(//aw:definitions[@chunk = $currentChunkHash]/aw:usedIn/aw:where)" {
                value "concat( $BACKSLASH, 'nwnotused{', @name, '}' )"
            }
            otherwise {
                // \\{<hash>} for each chunk referencing this one.
                variable whereUsed {
                    foreach "//aw:definitions[@chunk = $currentChunkHash]/aw:usedIn/*" {
                        variable whereUsedIndex "@ref"
                        value "concat( $BACKSLASH, $BACKSLASH, '{', //aw:chunk[@id = $whereUsedIndex]/@hash, '}' )"
                    }
                }
                value "concat( $BACKSLASH, 'nwused{', $whereUsed, '}' )"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
proc outputVariablesUsed {
variable currentChunkHash "@hash"
variable usedList {
foreach "//aw:chunk[@hash = $currentChunkHash]/aw:code/aw:variable" {
value "concat(., ' ')"
}
}
variable normalisedUsedList {
call removeDuplicates {
with delimiter "' '"
with newstring "''"
with string "$usedList"
}
}
variable taggedUsedList {
call build-variables {
with newstring "''"
with delimiter "' '"
with string "$normalisedUsedList"
with currentHash "$currentChunkHash"
}
}
if "string-length($taggedUsedList) > 0" {
value "concat( $BACKSLASH, 'nwidentuses{', $taggedUsedList, '}' )"
}
}
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Companion to build-indexed-variables: same recursive walk over a
    // delimiter-separated identifier list, but each identifier NOT
    // defined in chunk $currentHash is rendered as a
    // \\{{\nwixident{<tex>}}{<index>}} pair for \nwidentuses.
    proc build-variables {
        // Remaining unprocessed part of the list.
        parameter string
        // Accumulated output so far (tail-recursive accumulator).
        parameter newstring
        // Token separator (a single space in practice).
        parameter delimiter
        // Hash of the chunk whose uses are being listed.
        parameter currentHash
        choose {
            when "$string = ''" {
                value "$newstring"
            }
            otherwise {
                // First token of the remaining list.
                variable nextToken {
                    choose {
                        when "contains($string, $delimiter)" {
                            value "substring-before($string, $delimiter)"
                        }
                        otherwise {
                            value "$string"
                        }
                    }
                }
                // Token escaped for TeX display.
                variable normalisedVar1 {
                    call normalisedTeXInDocumentation {
                        with txt "$nextToken"
                    }
                }
                // Token normalised for use as an index key.
                variable normalisedVar2 {
                    call normalisedVarInIndex {
                        with txt "$nextToken"
                    }
                }
                variable remainderLine "substring-after($string, $delimiter)"
                choose {
                    when "//aw:definitions[@chunk = $currentHash]//aw:define[@var = $nextToken]" {
                        // Defined in this chunk: not a "use", skip it.
                        call build-variables {
                            with string "$remainderLine"
                            with newstring "$newstring"
                            with delimiter "$delimiter"
                            with currentHash "$currentHash"
                        }
                    }
                    otherwise {
                        // Append this token's pair to the accumulator.
                        variable temp {
                            choose {
                                when "$newstring = ''" {
                                    value "concat( $BACKSLASH, $BACKSLASH, '{{', $BACKSLASH, 'nwixident{', $normalisedVar1, '}}{', $normalisedVar2, '}}' )"
                                }
                                otherwise {
                                    value "concat( $newstring, $BACKSLASH, $BACKSLASH, '{{', $BACKSLASH, 'nwixident{', $normalisedVar1, '}}{', $normalisedVar2, '}}' )"
                                }
                            }
                        }
                        call build-variables {
                            with string "$remainderLine"
                            with newstring "$temp"
                            with delimiter "$delimiter"
                            with currentHash "$currentHash"
                        }
                    }
                }
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
proc outputUsesOnDefLine {
variable currentChunkName "@name"
variable hashList {
foreach "//aw:chunk[@type = 'code']" {
variable usedInHash "@hash"
foreach "aw:code/aw:include" {
if "@ref = $currentChunkName" {
value "concat( $BACKSLASH, $BACKSLASH, '{', $usedInHash, '}' )"
}
}
}
}
if "string-length($hashList) > 0" {
value "concat( $BACKSLASH, 'nwusesondefline{', $hashList, '}' )"
}
}
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // For a chunk defined in several parts, emit
    // \nwprevnextdefs{<prev hash>}{<next hash>}; the first and last
    // parts substitute \relax for the missing neighbour.  Nothing is
    // emitted for a chunk defined exactly once.  Relies on hashes having
    // the form <id>-<n> with consecutive numeric suffixes.
    proc outputPrevNextDefs {
        variable currentName "@name"
        // Numeric suffix of this part's hash.
        variable hashIndex {
            call substringAfterLast {
                with delimiter "'-'"
                with string "@hash"
            }
        }
        // Hash with the numeric suffix removed.
        variable chunkHashWithoutIndex {
            call substringBeforeLast {
                with delimiter "'-'"
                with string "@hash"
            }
        }
        // NOTE(review): computed but never used - candidate for removal.
        variable numberOfFollowingSiblings "count(following-sibling::aw:chunk[$currentName = @name])"
        // Total number of parts making up this named chunk.
        variable numberOfAlsoChunks "count(//aw:chunk[$currentName = @name])"
        variable nextHash {
            choose {
                when "$hashIndex = $numberOfAlsoChunks" {
                    value "concat( $BACKSLASH, 'relax' )"
                }
                otherwise {
                    value "concat( $chunkHashWithoutIndex, '-', $hashIndex + 1 )"
                }
            }
        }
        variable previousHash {
            choose {
                when "$hashIndex = 1" {
                    value "concat( $BACKSLASH, 'relax' )"
                }
                otherwise {
                    value "concat( $chunkHashWithoutIndex, '-', $hashIndex - 1 )"
                }
            }
        }
        if "not($numberOfAlsoChunks = 1)" {
            value "concat( $BACKSLASH, 'nwprevnextdefs{', $previousHash, '}{', $nextHash, '}' )"
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // \sublabel{<hash>}: anchor for noweb sub-page references.
    proc sublabel {
        value "concat( $BACKSLASH, 'sublabel{', @hash, '}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Wrap the chunk's sub-page tag (see nwtagstyle) in \nwmargintag{...}
    // so it appears in the page margin.
    proc nwmargintag {
        value "concat( $BACKSLASH, 'nwmargintag{' )"
        call nwtagstyle
        text "}"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit {\nwtagstyle{}\subpageref{<hash>}} for the context chunk.
    proc nwtagstyle {
        value "concat( '{', $BACKSLASH, 'nwtagstyle{}', $BACKSLASH, 'subpageref{', @hash, '}}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit the chunk's definition line: \moddef{<name>[~<tag>]} closed by
    // \endmoddef for the first part of a chunk, or \plusendmoddef for a
    // continuation part (hash suffix > 1).
    proc moddef {
        value "concat( $BACKSLASH, 'moddef{' )"
        value "@name"
        // The sub-page tag is only attached when indexing is enabled.
        if "$index = 'yes'" {
            value "$NON_BREAKING_SPACE"
            call nwtagstyle
        }
        text "}"
        // Numeric suffix of this part's hash (<id>-<n>).
        variable hashIndex {
            call substringAfterLast {
                with delimiter "'-'"
                with string "@hash"
            }
        }
        choose {
            when "$hashIndex = 1" {
                value "concat( $BACKSLASH, 'endmoddef' )"
            }
            otherwise {
                value "concat( $BACKSLASH, 'plusendmoddef' )"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit \nwdocspar (paragraph break before a documentation chunk).
    proc nwdocspar {
        value "concat($BACKSLASH, 'nwdocspar', $NEW_LINE )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Emit the closing chunk and identifier indices, preceded by a
    // blank line.
    proc outputFinalIndex {
        value "concat( $NEW_LINE, $NEW_LINE )"
        call outputChunkIndex
        call outputVariableIndex
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // One \nwixlogsorted{c}{...} entry per chunk name, ordered
    // case-insensitively by name.  Only the first part of each chunk
    // (hash suffix '1') produces an entry; inside it every definition
    // (\nwixd) and inclusion (\nwixu) of the name is listed in document
    // order, and the line is closed with the output comment string.
    proc outputChunkIndex {
        foreach "//aw:chunk[@type = 'code']" {
            sort using "@name" lower-first
            variable chunkName "@name"
            // Numeric suffix of this chunk's hash (<id>-<n>).
            variable hashIndex {
                call substringAfterLast {
                    with delimiter "'-'"
                    with string "@hash"
                }
            }
            if "$hashIndex = '1'" {
                value "concat( $BACKSLASH, 'nwixlogsorted{c}{' )"
                value "concat('{', @name, '}{', @hash, '}')"
                text "{"
                // Scan every code chunk for definitions/uses of the name.
                foreach "//aw:chunk[@type = 'code']" {
                    variable scannedChunkName "@name"
                    choose {
                        when "$chunkName = $scannedChunkName" {
                            variable whereDefined "@hash"
                            value "concat( $BACKSLASH, 'nwixd{', $whereDefined, '}' )"
                        }
                        when "$chunkName = descendant::aw:include/@ref" {
                            variable whereUsed "@hash"
                            value "concat( $BACKSLASH, 'nwixu{', $whereUsed, '}' )"
                        }
                    }
                }
                value "concat( '}}', $startCommentString, $NEW_LINE )"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // One \nwixlogsorted{i}{...} entry per aw:define element, ordered
    // case-insensitively by identifier.
    // NOTE(review): an identifier defined in several chunks produces one
    // entry per definition - confirm noweb tolerates the duplicates.
    proc outputVariableIndex {
        foreach "//aw:definitions/aw:define" {
            sort using "@var" lower-first
            // Identifier escaped for TeX display.
            variable normalisedVar1 {
                call normalisedTeXInDocumentation {
                    with txt "@var"
                }
            }
            // Identifier normalised for use as an index key.
            variable normalisedVar2 {
                call normalisedVarInIndex {
                    with txt "@var"
                }
            }
            value "concat( $BACKSLASH, 'nwixlogsorted{i}{{', $BACKSLASH, 'nwixident{', $normalisedVar1, '}}{', $normalisedVar2, '}}', $startCommentString, $NEW_LINE )"
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // A chunk body: weave each token/separator/variable/include child
    // through the outputLineFragment templates below.
    match using "aw:code" scope "outputCode" {
        apply-templates scope "outputLineFragment"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Literal code token: escape TeX special characters and copy through.
    match using "aw:token" scope "outputLineFragment" {
        variable normalisedTeXLine {
            call normalisedTeXInCode {
                with txt "."
            }
        }
        value "$normalisedTeXLine"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Separator text between tokens: escaped exactly like a token.
    match using "aw:separator" scope "outputLineFragment" {
        variable normalisedTeXLine {
            call normalisedTeXInCode {
                with txt "."
            }
        }
        value "$normalisedTeXLine"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // A recognised identifier in code: render it as
    // \nwlinkedidentc{<ident>}{<hash>} linking to the enclosing chunk.
    // Note: escaped with the documentation normaliser, not the code one.
    match using "aw:variable" scope "outputLineFragment" {
        variable normalisedTeXLine {
            call normalisedTeXInDocumentation {
                with txt "."
            }
        }
        value "concat( $BACKSLASH, 'nwlinkedidentc{', $normalisedTeXLine, '}{', ancestor::aw:chunk/@hash, '}' )"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // A chunk inclusion: render it as \LA{}<name>~<subpage tag>\RA{},
    // where the tag points at the first code chunk with that name.
    match using "aw:include" scope "outputLineFragment" {
        variable thisIncludeRef "@ref"
        variable chunkHash "//aw:chunk[@type = 'code'][@name = $thisIncludeRef]/@hash"
        value "concat( $BACKSLASH, 'LA{}' )"
        value "concat( $thisIncludeRef, $NON_BREAKING_SPACE )"
        value "concat( '{', $BACKSLASH, 'nwtagstyle{}', $BACKSLASH, 'subpageref{', $chunkHash, '}}' )"
        value "concat( $BACKSLASH, 'RA{}' )"
        // Copies the element's own text content, if any - presumably
        // trailing text after the include; confirm against the tangler.
        value "."
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Recursively escape TeX special characters in code text: $ & % #
    // become backslash-escaped, _ becomes {\_}, braces become \{ \},
    // and a backslash doubles.  One character class is handled per
    // recursion step, tested in the order of the when clauses.
    // NOTE(review): the prefix emitted by substring-before is copied
    // without further escaping, so text mixing two classes where the
    // later-tested class appears first is escaped out of order - this
    // appears to assume each token carries at most one class; confirm.
    proc normalisedTeXInCode {
        // Text still to be escaped.
        parameter txt
        choose {
            when "contains($txt, $DOLLAR )" {
                value "substring-before($txt, $DOLLAR )"
                value "concat( $BACKSLASH, $DOLLAR )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, $DOLLAR )"
                }
            }
            when "contains( $txt, $AMPERSAND )" {
                value "substring-before( $txt, $AMPERSAND)"
                value "concat( $BACKSLASH, $AMPERSAND )"
                call normalisedTeXInCode {
                    with txt "substring-after( $txt, $AMPERSAND )"
                }
            }
            when "contains($txt, $PERCENT )" {
                value "substring-before($txt, $PERCENT )"
                value "concat( $BACKSLASH, $PERCENT )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, $PERCENT )"
                }
            }
            when "contains($txt, $HASH )" {
                value "substring-before($txt, $HASH )"
                value "concat( $BACKSLASH, $HASH )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, $HASH )"
                }
            }
            when "contains( $txt, $UNDERSCORE )" {
                value "substring-before($txt, $UNDERSCORE )"
                value "concat( '{', $BACKSLASH, $UNDERSCORE, '}' )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, $UNDERSCORE )"
                }
            }
            when "contains($txt, '{' )" {
                value "substring-before($txt, '{' )"
                value "concat( $BACKSLASH, '{' )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, '{' )"
                }
            }
            when "contains($txt, '}')" {
                value "substring-before($txt, '}')"
                value "concat( $BACKSLASH, '}' )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, '}')"
                }
            }
            when "contains($txt, $BACKSLASH )" {
                value "substring-before($txt, $BACKSLASH )"
                value "concat( $BACKSLASH, $BACKSLASH )"
                call normalisedTeXInCode {
                    with txt "substring-after($txt, $BACKSLASH )"
                }
            }
            otherwise {
                // No specials remain: copy the tail through unchanged.
                value "$txt"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Weave the text lines of a documentation chunk through the
    // outputDocumentationFragment templates.
    // NOTE(review): chunkNumber is never used - candidate for removal.
    proc outputDocumentationLines {
        variable chunkNumber "position()"
        apply-templates scope "outputDocumentationFragment"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // One documentation line: strip the '<!CDATA' / trailing '>' wrapper
    // (same convention as outputFirstDocumentationChunk) and escape the
    // remainder for TeX.
    match using "aw:text" scope "outputDocumentationFragment" {
        // Line with any leading '<!CDATA' marker removed.
        variable removeStart {
            choose {
                when "starts-with(., '<!CDATA')" {
                    value "substring-after(., '<!CDATA')"
                }
                otherwise {
                    value "."
                }
            }
        }
        // Line with a trailing '>' removed - only when the '>' is the
        // final character of the line.
        variable removeEnd {
            choose {
                when "contains($removeStart, '>') and not(substring-after($removeStart, '>'))" {
                    value "substring-before($removeStart, '>')"
                }
                otherwise {
                    value "$removeStart"
                }
            }
        }
        variable normalisedTeXLine {
            call normalisedTeXInDocumentation {
                with txt "$removeEnd"
            }
        }
        value "$normalisedTeXLine"
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Recursively rewrite documentation text for TeX: noweb [[...]]
    // quoting becomes {\Tt{}...\nwendquote}, em/en dashes become ---/--,
    // and & # _ are brace-escaped.  One pattern is handled per recursion
    // step, tested in the order of the when clauses.
    // NOTE(review): as in normalisedTeXInCode, prefixes emitted by
    // substring-before are not re-escaped.
    proc normalisedTeXInDocumentation {
        // Text still to be processed.
        parameter txt
        // True while inside a [[...]] span: suppresses re-opening '[['.
        parameter isInBracket "false()"
        choose {
            when "contains($txt, '[[') and not($isInBracket)" {
                value "substring-before($txt, '[[')"
                value "concat( '{', $BACKSLASH, 'Tt{}' )"
                call normalisedTeXInDocumentation {
                    with txt "substring-after($txt, '[[')"
                    with isInBracket "true()"
                }
            }
            when "contains($txt, ']]')" {
                value "substring-before($txt, ']]')"
                value "concat( $BACKSLASH, 'nwendquote}' )"
                call normalisedTeXInDocumentation {
                    with txt "substring-after($txt, ']]')"
                }
            }
            when "contains($txt, $MDASH )" {
                value "substring-before($txt, $MDASH )"
                text "---"
                call normalisedTeXInDocumentation {
                    with txt "substring-after($txt, $MDASH )"
                }
            }
            when "contains($txt, $NDASH )" {
                value "substring-before($txt, $NDASH )"
                text "--"
                call normalisedTeXInDocumentation {
                    with txt "substring-after($txt, $NDASH )"
                }
            }
            when "contains( $txt, $AMPERSAND )" {
                value "substring-before( $txt, $AMPERSAND )"
                value "concat( '{', $BACKSLASH, $AMPERSAND, '}')"
                call normalisedTeXInDocumentation {
                    with txt "substring-after( $txt, $AMPERSAND )"
                }
            }
            when "contains($txt, $HASH )" {
                value "substring-before($txt, $HASH )"
                value "concat( '{', $BACKSLASH, $HASH, '}' )"
                call normalisedTeXInDocumentation {
                    with txt "substring-after($txt, $HASH )"
                }
            }
            when "contains($txt, $UNDERSCORE )" {
                value "substring-before($txt, $UNDERSCORE)"
                value "concat( '{', $BACKSLASH, $UNDERSCORE, '}' )"
                call normalisedTeXInDocumentation {
                    with txt "substring-after($txt, $UNDERSCORE )"
                }
            }
            otherwise {
                // No patterns remain: copy the tail through unchanged.
                value "$txt"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Map a delimiter-separated list of chunk ids to <startText><hash>
    // <endText> entries, resolving each id to its chunk hash.
    // NOTE(review): not called by any template in this file - possibly
    // retained for external use, otherwise dead code.
    proc outputIdentRefs {
        // The id list to process; defaults to the context node's value.
        parameter inputText "."
        parameter delimiter "$SPACE"
        parameter startText "'\\\\{'"
        parameter endText "'}'"
        if "string-length($inputText) > 0" {
            // Appending the delimiter guarantees substring-before finds
            // the last token even without a trailing delimiter.
            variable nextItem "substring-before(concat($inputText, $delimiter), $delimiter)"
            variable chunkHash "//aw:chunk[@id = $nextItem]/@hash"
            value "concat($startText, $chunkHash, $endText)"
            call outputIdentRefs {
                with inputText "substring-after($inputText, $delimiter)"
                with delimiter "$delimiter"
                with startText "$startText"
                with endText "$endText"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Normalise an identifier for use as a noweb index key: each '$'
    // becomes ':do' and each '&' becomes ':am'.
    proc normalisedVarInIndex {
        // Identifier text still to be processed.
        parameter txt
        choose {
            when "contains( $txt, $DOLLAR )" {
                value "substring-before( $txt, $DOLLAR )"
                text ":do"
                call normalisedVarInIndex {
                    with txt "substring-after( $txt, $DOLLAR )"
                }
            }
            when "contains( $txt, $AMPERSAND )" {
                value "substring-before( $txt, $AMPERSAND )"
                text ":am"
                call normalisedVarInIndex {
                    with txt "substring-after( $txt, $AMPERSAND )"
                }
            }
            otherwise {
                value "$txt"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Return the part of $string after the LAST occurrence of $delimiter
    // (the whole string when the delimiter is absent).  Used to extract
    // the numeric suffix of hashes shaped <id>-<n>.
    proc substringAfterLast {
        parameter string
        parameter delimiter
        choose {
            when "contains($string, $delimiter)" {
                // Keep discarding up to the next delimiter until none remain.
                call substringAfterLast {
                    with string "substring-after($string, $delimiter)"
                    with delimiter "$delimiter"
                }
            }
            otherwise {
                value "$string"
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Return the part of $string before the LAST occurrence of
    // $delimiter; produces nothing when the delimiter is absent.
    proc substringBeforeLast {
        parameter string
        parameter delimiter
        choose {
            when "contains($string, $delimiter)" {
                value "substring-before($string, $delimiter)"
                // Re-insert the delimiter only when more occurrences
                // follow (i.e. this was not the last one).
                choose {
                    when "contains(substring-after($string, $delimiter), $delimiter)" {
                        value "$delimiter"
                    }
                }
                call substringBeforeLast {
                    with string "substring-after($string, $delimiter)"
                    with delimiter "$delimiter"
                }
            }
        }
    }
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
// -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
    // Remove repeated tokens from a delimiter-separated list.  The input
    // must end with the delimiter (as built by the variable collectors);
    // the result carries no trailing delimiter.
    // NOTE(review): duplicate detection uses contains(), so a token that
    // is a substring of an already-kept token is dropped too (e.g. 'foo'
    // after 'foobar') - confirm acceptable for identifier lists.
    proc removeDuplicates {
        // Remaining unprocessed part of the list.
        parameter string
        // Accumulated de-duplicated list (tail-recursive accumulator).
        parameter newstring
        // Token separator (a single space in practice).
        parameter delimiter
        choose {
            when "$string = ''" {
                value "$newstring"
            }
            otherwise {
                variable nextToken "substring-before($string, $delimiter)"
                variable remainderLine "substring-after($string, $delimiter)"
                choose {
                    when "contains($newstring, $nextToken)" {
                        // Already present: skip this token.
                        call removeDuplicates {
                            with string "$remainderLine"
                            with newstring "$newstring"
                            with delimiter "$delimiter"
                        }
                    }
                    otherwise {
                        // Append the new token to the accumulator.
                        variable temp {
                            choose {
                                when "$newstring = ''" {
                                    value "$nextToken"
                                }
                                otherwise {
                                    value "concat($newstring, $delimiter, $nextToken)"
                                }
                            }
                        }
                        call removeDuplicates {
                            with string "$remainderLine"
                            with newstring "$temp"
                            with delimiter "$delimiter"
                        }
                    }
                }
            }
        }
    }
}