This article collects a number of code examples for the Java method org.antlr.v4.tool.Grammar.isCombined() and shows how Grammar.isCombined() is used in practice. The examples were extracted from selected open-source projects published on platforms such as GitHub, Stack Overflow, and Maven, so they make useful reference material. Details of the Grammar.isCombined() method:

Package: org.antlr.v4.tool
Class: Grammar
Method: isCombined
Description: none is given in the Javadoc. In practice, isCombined() returns true when the grammar is a combined grammar, i.e. a single .g4 file declared with "grammar T;" that contains both parser and lexer rules, rather than a separate "lexer grammar" or "parser grammar".
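Before the collected examples, here is a minimal, self-contained sketch of how isCombined() might be called directly. It is an illustration only, not taken from the sources below: it assumes the ANTLR 4 tool (org.antlr:antlr4) is on the classpath and uses the Grammar(String) and LexerGrammar(String) constructors that the tool provides for in-memory grammars; the grammar texts and class name are made up for the example.

import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;

public class IsCombinedDemo {
    public static void main(String[] args) throws Exception {
        // A combined grammar: one file declaring both parser and lexer rules.
        Grammar combined = new Grammar(
                "grammar T;\n" +
                "s  : ID ;\n" +
                "ID : [a-z]+ ;\n");
        System.out.println(combined.isCombined());   // expected: true

        // A pure lexer grammar is not combined.
        LexerGrammar lexer = new LexerGrammar(
                "lexer grammar L;\n" +
                "ID : [a-z]+ ;\n");
        System.out.println(lexer.isCombined());      // expected: false
    }
}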
Code example source: com.impetus.fabric/fabric-jdbc-driver-shaded

@Override
protected void enterChannelsSpec(GrammarAST tree) {
    if (g.isParser()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_PARSER_GRAMMAR, g.fileName, tree.token);
    }
    else if (g.isCombined()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_COMBINED_GRAMMAR, g.fileName, tree.token);
    }
}
Code example source: io.virtdata/virtdata-lib-realer

@Override
protected void enterChannelsSpec(GrammarAST tree) {
    if (g.isParser()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_PARSER_GRAMMAR, g.fileName, tree.token);
    }
    else if (g.isCombined()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_COMBINED_GRAMMAR, g.fileName, tree.token);
    }
}
Code example source: org.antlr/antlr4

@Override
protected void enterChannelsSpec(GrammarAST tree) {
    if (g.isParser()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_PARSER_GRAMMAR, g.fileName, tree.token);
    }
    else if (g.isCombined()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_COMBINED_GRAMMAR, g.fileName, tree.token);
    }
}
Code example source: org.antlr/antlr4

/** Get the name of the generated recognizer; may or may not be same
 *  as grammar name.
 *  Recognizer is TParser and TLexer from T if combined, else
 *  just use T regardless of grammar type.
 */
public String getRecognizerName() {
    String suffix = "";
    List<Grammar> grammarsFromRootToMe = getOutermostGrammar().getGrammarAncestors();
    String qualifiedName = name;
    if ( grammarsFromRootToMe!=null ) {
        StringBuilder buf = new StringBuilder();
        for (Grammar g : grammarsFromRootToMe) {
            buf.append(g.name);
            buf.append('_');
        }
        buf.append(name);
        qualifiedName = buf.toString();
    }
    if ( isCombined() || (isLexer() && implicitLexer!=null) ) {
        suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
    }
    return qualifiedName+suffix;
}
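As the Javadoc above explains, isCombined() decides whether a file-name suffix is appended to the grammar name: a combined grammar T yields the recognizer name TParser (with TLexer for its implicit lexer). A small, hypothetical sketch of that behavior, not taken from the sources above; it assumes the same in-memory Grammar(String) constructor used in the demo earlier, and the grammar text and class name are illustrative.

import org.antlr.v4.tool.Grammar;

public class RecognizerNameDemo {
    public static void main(String[] args) throws Exception {
        // Combined grammar "T": getRecognizerName() appends the "Parser" suffix.
        Grammar g = new Grammar(
                "grammar T;\n" +
                "s  : ID ;\n" +
                "ID : [a-z]+ ;\n");
        System.out.println(g.getRecognizerName());   // expected: TParser
    }
}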
Code example source: uk.co.nichesolutions/antlr4

@Override
protected void enterChannelsSpec(GrammarAST tree) {
    if (g.isParser()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_PARSER_GRAMMAR, g.fileName, tree.token);
    }
    else if (g.isCombined()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_COMBINED_GRAMMAR, g.fileName, tree.token);
    }
}
Code example source: com.tunnelvisionlabs/antlr4

@Override
protected void enterChannelsSpec(GrammarAST tree) {
    if (g.isParser()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_PARSER_GRAMMAR, g.fileName, tree.token);
    }
    else if (g.isCombined()) {
        g.tool.errMgr.grammarError(ErrorType.CHANNELS_BLOCK_IN_COMBINED_GRAMMAR, g.fileName, tree.token);
    }
}
Code example source: uk.co.nichesolutions/antlr4

/** Get the name of the generated recognizer; may or may not be same
 *  as grammar name.
 *  Recognizer is TParser and TLexer from T if combined, else
 *  just use T regardless of grammar type.
 */
public String getRecognizerName() {
    String suffix = "";
    List<Grammar> grammarsFromRootToMe = getOutermostGrammar().getGrammarAncestors();
    String qualifiedName = name;
    if ( grammarsFromRootToMe!=null ) {
        StringBuilder buf = new StringBuilder();
        for (Grammar g : grammarsFromRootToMe) {
            buf.append(g.name);
            buf.append('_');
        }
        buf.append(name);
        qualifiedName = buf.toString();
    }
    if ( isCombined() || (isLexer() && implicitLexer!=null) ) {
        suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
    }
    return qualifiedName+suffix;
}
Code example source: org.antlr/antlr4

public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }
    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }
    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn);
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
    List<String> allChannels = new ArrayList<String>();
    allChannels.add("DEFAULT_TOKEN_CHANNEL");
    allChannels.add("HIDDEN");
    allChannels.addAll(channelValueToNameList);
    return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), allChannels, ((LexerGrammar)this).modes.keySet(), deserialized, input);
}
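For a combined grammar, createLexerInterpreter() delegates to the implicit lexer, so the call works on either a lexer grammar or a combined grammar, but not on a pure parser grammar. A minimal usage sketch follows; it is not taken from the sources above, assumes the ANTLR 4 runtime and tool are on the classpath, and uses an illustrative grammar text and input string.

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.LexerInterpreter;
import org.antlr.v4.tool.Grammar;

public class LexerInterpreterDemo {
    public static void main(String[] args) throws Exception {
        // Combined grammar: isCombined() is true, so createLexerInterpreter()
        // is forwarded to the implicitly generated lexer grammar.
        Grammar g = new Grammar(
                "grammar T;\n" +
                "s  : ID+ ;\n" +
                "ID : [a-z]+ ;\n" +
                "WS : [ \\t\\r\\n]+ -> skip ;\n");
        LexerInterpreter lexer = g.createLexerInterpreter(CharStreams.fromString("abc def"));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill();
        System.out.println(tokens.getTokens());
    }
}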
Code example source: com.impetus.fabric/fabric-jdbc-driver-shaded

/** Get the name of the generated recognizer; may or may not be same
 *  as grammar name.
 *  Recognizer is TParser and TLexer from T if combined, else
 *  just use T regardless of grammar type.
 */
public String getRecognizerName() {
    String suffix = "";
    List<Grammar> grammarsFromRootToMe = getOutermostGrammar().getGrammarAncestors();
    String qualifiedName = name;
    if ( grammarsFromRootToMe!=null ) {
        StringBuilder buf = new StringBuilder();
        for (Grammar g : grammarsFromRootToMe) {
            buf.append(g.name);
            buf.append('_');
        }
        buf.append(name);
        qualifiedName = buf.toString();
    }
    if ( isCombined() || (isLexer() && implicitLexer!=null) ) {
        suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
    }
    return qualifiedName+suffix;
}
Code example source: io.virtdata/virtdata-lib-realer

/** Get the name of the generated recognizer; may or may not be same
 *  as grammar name.
 *  Recognizer is TParser and TLexer from T if combined, else
 *  just use T regardless of grammar type.
 */
public String getRecognizerName() {
    String suffix = "";
    List<Grammar> grammarsFromRootToMe = getOutermostGrammar().getGrammarAncestors();
    String qualifiedName = name;
    if ( grammarsFromRootToMe!=null ) {
        StringBuilder buf = new StringBuilder();
        for (Grammar g : grammarsFromRootToMe) {
            buf.append(g.name);
            buf.append('_');
        }
        buf.append(name);
        qualifiedName = buf.toString();
    }
    if ( isCombined() || (isLexer() && implicitLexer!=null) ) {
        suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
    }
    return qualifiedName+suffix;
}
Code example source: com.tunnelvisionlabs/antlr4

public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }
    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }
    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn, Arrays.asList(getRuleNames()));
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
    List<String> allChannels = new ArrayList<String>();
    allChannels.add("DEFAULT_TOKEN_CHANNEL");
    allChannels.add("HIDDEN");
    allChannels.addAll(channelValueToNameList);
    return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), allChannels, ((LexerGrammar)this).modes.keySet(), deserialized, input);
}
Code example source: io.virtdata/virtdata-lib-realer

public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }
    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }
    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn);
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
    List<String> allChannels = new ArrayList<String>();
    allChannels.add("DEFAULT_TOKEN_CHANNEL");
    allChannels.add("HIDDEN");
    allChannels.addAll(channelValueToNameList);
    return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), allChannels, ((LexerGrammar)this).modes.keySet(), deserialized, input);
}
Code example source: com.tunnelvisionlabs/antlr4

/** Get the name of the generated recognizer; may or may not be same
 *  as grammar name.
 *  Recognizer is TParser and TLexer from T if combined, else
 *  just use T regardless of grammar type.
 */
public String getRecognizerName() {
    String suffix = "";
    List<Grammar> grammarsFromRootToMe = getOutermostGrammar().getGrammarAncestors();
    String qualifiedName = name;
    if ( grammarsFromRootToMe!=null ) {
        StringBuilder buf = new StringBuilder();
        for (Grammar g : grammarsFromRootToMe) {
            buf.append(g.name);
            buf.append('_');
        }
        if (isAbstract()) {
            buf.append("Abstract");
        }
        buf.append(name);
        qualifiedName = buf.toString();
    }
    else if (isAbstract()) {
        qualifiedName = "Abstract" + name;
    }
    if ( isCombined() || (isLexer() && implicitLexer!=null) ) {
        suffix = Grammar.getGrammarTypeToFileNameSuffix(getType());
    }
    return qualifiedName+suffix;
}
Code example source: com.impetus.fabric/fabric-jdbc-driver-shaded

public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }
    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }
    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn);
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
    return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), ((LexerGrammar)this).modes.keySet(), deserialized, input);
}
Code example source: uk.co.nichesolutions/antlr4

public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }
    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }
    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn);
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
    return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), ((LexerGrammar)this).modes.keySet(), deserialized, input);
}
Code example source: com.tunnelvisionlabs/antlr4

void checkImport(Token importID) {
    Grammar delegate = g.getImportedGrammar(importID.getText());
    if ( delegate==null ) return;
    List<Integer> validDelegators = validImportTypes.get(delegate.getType());
    if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) {
        g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
    if ( g.isCombined() &&
         (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER)) ||
          delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) {
        g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
}
Code example source: org.antlr/antlr4

void checkImport(Token importID) {
    Grammar delegate = g.getImportedGrammar(importID.getText());
    if ( delegate==null ) return;
    List<Integer> validDelegators = validImportTypes.get(delegate.getType());
    if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) {
        g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
    if ( g.isCombined() &&
         (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER)) ||
          delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) {
        g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
}
Code example source: uk.co.nichesolutions/antlr4

void checkImport(Token importID) {
    Grammar delegate = g.getImportedGrammar(importID.getText());
    if ( delegate==null ) return;
    List<Integer> validDelegators = validImportTypes.get(delegate.getType());
    if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) {
        g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
    if ( g.isCombined() &&
         (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER)) ||
          delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) {
        g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
}
Code example source: io.virtdata/virtdata-lib-realer

void checkImport(Token importID) {
    Grammar delegate = g.getImportedGrammar(importID.getText());
    if ( delegate==null ) return;
    List<Integer> validDelegators = validImportTypes.get(delegate.getType());
    if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) {
        g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
    if ( g.isCombined() &&
         (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER)) ||
          delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) {
        g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
}
Code example source: com.impetus.fabric/fabric-jdbc-driver-shaded

void checkImport(Token importID) {
    Grammar delegate = g.getImportedGrammar(importID.getText());
    if ( delegate==null ) return;
    List<Integer> validDelegators = validImportTypes.get(delegate.getType());
    if ( validDelegators!=null && !validDelegators.contains(g.getType()) ) {
        g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
    if ( g.isCombined() &&
         (delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER)) ||
          delegate.name.equals(g.name+Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER))) ) {
        g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH,
                                   g.fileName,
                                   importID,
                                   g, delegate);
    }
}