1
0
Fork 0
mirror of https://github.com/FrankerFaceZ/FrankerFaceZ.git synced 2025-08-07 06:40:54 +00:00
* Fixed: Apply a maximum height to the ban reason pop-up to avoid long lists from going off screen.
* API Added: `removeTokenizer(type)` and `removeRichProvider(type)` methods for the chat module.
* API Changed: Chat tokenizers no longer need to return a token list if the tokens have not changed.
This commit is contained in:
SirStendec 2021-12-01 16:48:10 -05:00
parent a35387abcf
commit c0f7747428
9 changed files with 113 additions and 102 deletions

View file

@ -1854,6 +1854,24 @@ export default class Chat extends Module {
});
}
removeTokenizer(tokenizer) {
let type;
if ( typeof tokenizer === 'string' ) type = tokenizer;
else type = tokenizer.type;
tokenizer = this.tokenizers[type];
if ( ! tokenizer )
return null;
if ( tokenizer.tooltip )
delete this.tooltips.types[type];
const idx = this.__tokenizers.indexOf(tokenizer);
if ( idx !== -1 )
this.__tokenizers.splice(idx, 1);
return tokenizer;
}
addRichProvider(provider) {
const type = provider.type;
@ -1869,12 +1887,39 @@ export default class Chat extends Module {
});
}
removeRichProvider(provider) {
let type;
if ( typeof provider === 'string' ) type = provider;
else type = provider.type;
tokenizeString(message, msg) {
provider = this.rich_providers[type];
if ( ! provider )
return null;
const idx = this.__rich_providers.indexOf(provider);
if ( idx !== -1 )
this.__rich_providers.splice(idx, 1);
return provider;
}
tokenizeString(message, msg, user, haltable = false) {
let tokens = [{type: 'text', text: message}];
for(const tokenizer of this.__tokenizers)
tokens = tokenizer.process.call(this, tokens, msg);
for(const tokenizer of this.__tokenizers) {
if ( ! tokenizer.process )
continue;
const new_tokens = tokenizer.process.call(this, tokens, msg, user, haltable);
if ( new_tokens )
tokens = new_tokens;
if ( haltable && msg.ffz_halt_tokens ) {
msg.ffz_halt_tokens = undefined;
break;
}
}
return tokens;
}
@ -1914,7 +1959,13 @@ export default class Chat extends Module {
let tokens = [{type: 'text', text: msg.message}];
for(const tokenizer of this.__tokenizers) {
tokens = tokenizer.process.call(this, tokens, msg, user, haltable);
if ( ! tokenizer.process )
continue;
const new_tokens = tokenizer.process.call(this, tokens, msg, user, haltable);
if ( new_tokens )
tokens = new_tokens;
if ( haltable && msg.ffz_halt_tokens ) {
msg.ffz_halt_tokens = undefined;
break;