/*=============================================================================#
# Copyright (c) 2014, 2019 Stephan Wahlbrink and others.
#
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# https://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
# which is available at https://www.apache.org/licenses/LICENSE-2.0.
#
# SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
#
# Contributors:
# Stephan Wahlbrink <sw@wahlbrink.eu> - initial API and implementation
#=============================================================================*/

package org.eclipse.statet.docmlet.wikitext.core.source;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;

import org.eclipse.mylyn.wikitext.parser.Attributes;
import org.eclipse.mylyn.wikitext.parser.DocumentBuilder;
import org.eclipse.mylyn.wikitext.parser.Locator;

import org.eclipse.statet.ecommons.text.core.treepartitioner.TreePartitionNode;
import org.eclipse.statet.ecommons.text.core.treepartitioner.TreePartitionNodeScan;
import org.eclipse.statet.ecommons.text.core.treepartitioner.TreePartitionNodeScan.BreakException;
import org.eclipse.statet.ecommons.text.core.treepartitioner.TreePartitionNodeScanner;
import org.eclipse.statet.ecommons.text.core.treepartitioner.TreePartitionNodeType;

import org.eclipse.statet.docmlet.wikitext.core.markup.MarkupParser2;
import org.eclipse.statet.docmlet.wikitext.core.markup.WikitextLocator;
import org.eclipse.statet.docmlet.wikitext.core.markup.WikitextMarkupLanguage;

import org.eclipse.statet.ltk.core.SourceContent;

public class WikitextPartitionNodeScanner extends DocumentBuilder
implements TreePartitionNodeScanner {
private WikitextMarkupLanguage markupLanguage;
private final int markupLanguageMode;
private TreePartitionNodeScan scan;
/** The current node */
private TreePartitionNode node;
private int startOffset;
private int endOffset;
private WikitextLocator locator2;
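/** Nesting depth of currently open blocks that are ignored for partitioning */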
private int ignoreCounter;
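/** Attributes of the currently open blocks/headings, popped again when the element ends */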
private final List<Attributes> attributeStack= new ArrayList<>();
public WikitextPartitionNodeScanner(final WikitextMarkupLanguage markupLanguage) {
this(markupLanguage, 0);
}
public WikitextPartitionNodeScanner(final WikitextMarkupLanguage markupLanguage,
final int markupLanguageMode) {
this.markupLanguageMode= markupLanguageMode;
setMarkupLanguage(markupLanguage);
}
public WikitextMarkupLanguage getMarkupLanguage() {
return this.markupLanguage;
}
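
/**
 * Sets the markup language to use for scanning.
 * If the given language matches the current one (same name and class), its markup config is
 * applied to the existing instance and, if the languages are then equal, that instance is kept;
 * otherwise the given language is cloned for the partitioner.
 */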
public void setMarkupLanguage(final WikitextMarkupLanguage markupLanguage) {
if (this.markupLanguage != null
&& this.markupLanguage.getName().equals(markupLanguage.getName())
&& this.markupLanguage.getClass() == markupLanguage.getClass()) {
this.markupLanguage.setMarkupConfig(markupLanguage.getMarkupConfig());
if (this.markupLanguage.equals(markupLanguage)) {
return;
}
}
this.markupLanguage= markupLanguage.clone("Doc/Partitioner", this.markupLanguageMode); //$NON-NLS-1$
}
protected boolean isTemplateMode() {
return ((this.markupLanguageMode & WikitextMarkupLanguage.TEMPLATE_MODE) != 0);
}
@Override
public void setLocator(final Locator locator) {
super.setLocator(locator);
this.locator2= (WikitextLocator) locator;
}
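
/**
 * Returns a safe offset to restart scanning at: the start of the line containing the top-level
 * node of the given node, moved further back while that line start lies inside a preceding
 * sibling.
 */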
@Override
public int getRestartOffset(TreePartitionNode node, final IDocument document,
int offset) throws BadLocationException {
final WikitextPartitionNodeType rootType= getDefaultRootType();
TreePartitionNode parent= node.getParent();
if (parent != null) {
while (parent.getType() != rootType) {
node= parent;
parent= node.getParent();
}
// start at the line start, but never inside a preceding sibling node
int idx= parent.indexOfChild(node);
while (true) {
final int line= document.getLineOfOffset(node.getStartOffset());
offset= document.getLineOffset(line);
if (idx > 0) {
node= parent.getChild(--idx);
if (offset < node.getEndOffset()) {
continue;
}
}
break;
}
}
return offset;
}
@Override
public WikitextPartitionNodeType getDefaultRootType() {
return WikitextPartitionNodeType.DEFAULT_ROOT;
}
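
/**
 * Runs the scan: sets the range, initializes the current node from the scan's begin node and
 * parses the range with the markup language, building the partition nodes from the document
 * builder callbacks below.
 */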
@Override
public void execute(final TreePartitionNodeScan scan) throws BreakException {
this.scan= scan;
this.node= null;
setRange(scan.getStartOffset(), scan.getEndOffset());
init();
assert (this.node != null);
process();
}
protected TreePartitionNodeScan getScan() {
return this.scan;
}
protected void setRange(final int startOffset, final int endOffset) {
this.startOffset= startOffset;
this.endOffset= endOffset;
// this.reader.setRange(getScan().getDocument(), startOffset, endOffset - startOffset);
// updateLast();
}
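
/**
 * Initializes the current node from the scan's begin node; if that node is not a wikitext
 * partition node, a node of the default root type is added below it and becomes the current node.
 */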
protected void init() {
final TreePartitionNode beginNode= getScan().getBeginNode();
if (beginNode.getType() instanceof WikitextPartitionNodeType) {
this.node= beginNode;
}
else {
this.node= beginNode;
addNode(getDefaultRootType(), getScan().getStartOffset());
}
}
protected final int getStartOffset() {
return this.startOffset;
}
protected final void initNode(final TreePartitionNode node) {
if (this.node != null) {
throw new IllegalStateException();
}
this.node= node;
}
protected final void addNode(final TreePartitionNodeType type, final int offset) {
this.node= this.scan.add(type, this.node, offset, 0);
}
protected final TreePartitionNode getNode() {
return this.node;
}
protected final void exitNode(final int offset, final int flags) {
this.scan.expand(this.node, offset, flags, true);
this.node= this.node.getParent();
}
protected final void exitNode() {
this.node= this.node.getParent();
}
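
/**
 * Parses the configured document range with the markup language; the partition nodes are
 * created by the DocumentBuilder callbacks of this class.
 */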
private void process() {
try {
final DocumentBuilder builder= this;
// final DocumentBuilder builder= new MultiplexingDocumentBuilder(new EventLoggingDocumentBuilder(), this);
final MarkupParser2 markupParser= new MarkupParser2(this.markupLanguage, builder);
configure(markupParser);
final SourceContent content= new SourceContent(0,
this.scan.getDocument().get(this.startOffset, this.endOffset - this.startOffset),
this.startOffset );
markupParser.parse(content, false);
}
catch (final BadLocationException e) {
throw new RuntimeException(e);
}
finally {
this.attributeStack.clear();
}
}
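
/**
 * Configures the parser for partitioning: generative content and inline elements are disabled,
 * source structure events are enabled.
 */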
protected void configure(final MarkupParser2 markupParser) {
markupParser.disable(MarkupParser2.GENERATIVE_CONTENT);
markupParser.enable(MarkupParser2.SOURCE_STRUCT);
markupParser.disable(MarkupParser2.INLINE_ALL);
}
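
// The locator reports event offsets relative to the parsed content;
// the following methods translate them into absolute document offsets.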
protected final int getEventBeginOffset() {
return this.startOffset + this.locator2.getBeginOffset();
}
protected final int getEventEndOffset() {
return this.startOffset + this.locator2.getEndOffset();
}
protected final int getEventFlags(final Attributes attributes) {
if (attributes instanceof SourceElementDetail) {
return (((SourceElementDetail) attributes).getSourceElementDetail() & TreePartitionNode.END_UNCLOSED);
}
return 0;
}
@Override
public void beginDocument() {
}
@Override
public void endDocument() {
}
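
/**
 * Adds a partition node for the block unless the block type is ignored (see ignore(BlockType));
 * ignored blocks are counted so that the matching endBlock events are skipped as well.
 */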
@Override
public void beginBlock(final BlockType type, final Attributes attributes) {
if (this.ignoreCounter > 0 || ignore(type)) {
this.ignoreCounter++;
return;
}
addNode(WikitextPartitionNodeType.BLOCK_TYPES.get(type), getEventBeginOffset());
this.attributeStack.add(attributes);
}
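
/**
 * Returns whether the block does not get its own partition node: item and table cell/row blocks
 * are always ignored, and any block nested inside a QUOTE block is ignored too.
 */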
private boolean ignore(final BlockType type) {
switch (type) {
case DEFINITION_ITEM:
case LIST_ITEM:
case TABLE_CELL_HEADER:
case TABLE_CELL_NORMAL:
case TABLE_ROW:
return true;
default:
return (this.node != null
&& this.node.getType() instanceof WikitextPartitionNodeType
&& ((WikitextPartitionNodeType) this.node.getType()).getBlockType() == BlockType.QUOTE );
}
}
@Override
public void endBlock() {
if (this.ignoreCounter > 0) {
this.ignoreCounter--;
return;
}
final Attributes attributes= this.attributeStack.remove(this.attributeStack.size() - 1);
exitNode(getEventEndOffset(), getEventFlags(attributes));
}
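
// Spans do not get their own partition nodes; the current node is only expanded to cover them.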
@Override
public void beginSpan(final SpanType type, final Attributes attributes) {
this.scan.expand(this.node, getEventBeginOffset(), 0, false);
}
@Override
public void endSpan() {
this.scan.expand(this.node, getEventEndOffset(), 0, false);
}
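
// Headings always get their own partition node, typed by heading level.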
@Override
public void beginHeading(final int level, final Attributes attributes) {
addNode(WikitextPartitionNodeType.HEADING_TYPES.get(level), getEventBeginOffset());
this.attributeStack.add(attributes);
}
@Override
public void endHeading() {
final Attributes attributes= this.attributeStack.remove(this.attributeStack.size() - 1);
exitNode(getEventEndOffset(), getEventFlags(attributes));
}
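
// Content-level events (text, entities, images, links, line breaks) are not relevant
// for partitioning and are therefore ignored.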
@Override
public void characters(final String text) {
}
@Override
public void charactersUnescaped(final String literal) {
}
@Override
public void entityReference(final String entity) {
}
@Override
public void image(final Attributes attributes, final String url) {
}
@Override
public void link(final Attributes attributes, final String hrefOrHashName, final String text) {
}
@Override
public void imageLink(final Attributes linkAttributes, final Attributes imageAttributes,
final String href, final String imageUrl) {
}
@Override
public void acronym(final String text, final String definition) {
}
@Override
public void lineBreak() {
}
}